OLD | NEW |
| 1 # Copyright 2010 Google Inc. |
| 2 # Copyright (c) 2011, Nexenta Systems Inc. |
| 3 # |
| 4 # Permission is hereby granted, free of charge, to any person obtaining a |
| 5 # copy of this software and associated documentation files (the |
| 6 # "Software"), to deal in the Software without restriction, including |
| 7 # without limitation the rights to use, copy, modify, merge, publish, dis- |
| 8 # tribute, sublicense, and/or sell copies of the Software, and to permit |
| 9 # persons to whom the Software is furnished to do so, subject to the fol- |
| 10 # lowing conditions: |
| 11 # |
| 12 # The above copyright notice and this permission notice shall be included |
| 13 # in all copies or substantial portions of the Software. |
| 14 # |
| 15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 16 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- |
| 17 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT |
| 18 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, |
| 19 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 # IN THE SOFTWARE. |
| 22 |
| 23 import boto |
| 24 import os |
| 25 import sys |
| 26 from boto.s3.deletemarker import DeleteMarker |
| 27 from boto.exception import BotoClientError |
| 28 from boto.exception import InvalidUriError |
| 29 |
| 30 |
| 31 class StorageUri(object): |
| 32 """ |
| 33     Base class for representing a storage provider-independent bucket and
| 34     object name with a shorthand URI-like syntax.
| 35 
| 36     This is an abstract class: the constructor cannot be called (it raises
| 37     an exception if you try).
| 38 """ |
| 39 |
| 40 connection = None |
| 41 # Optional args that can be set from one of the concrete subclass |
| 42 # constructors, to change connection behavior (e.g., to override |
| 43 # https_connection_factory). |
| 44 connection_args = None |
| 45 |
| 46 # Map of provider scheme ('s3' or 'gs') to AWSAuthConnection object. We |
| 47 # maintain a pool here in addition to the connection pool implemented |
| 48 # in AWSAuthConnection because the latter re-creates its connection pool |
| 49 # every time that class is instantiated (so the current pool is used to |
| 50 # avoid re-instantiating AWSAuthConnection). |
| 51 provider_pool = {} |
| 52 |
| 53 def __init__(self): |
| 54 """Uncallable constructor on abstract base StorageUri class. |
| 55 """ |
| 56 raise BotoClientError('Attempt to instantiate abstract StorageUri ' |
| 57 'class') |
| 58 |
| 59 def __repr__(self): |
| 60 """Returns string representation of URI.""" |
| 61 return self.uri |
| 62 |
| 63 def equals(self, uri): |
| 64 """Returns true if two URIs are equal.""" |
| 65 return self.uri == uri.uri |
| 66 |
| 67 def check_response(self, resp, level, uri): |
| 68 if resp is None: |
| 69 raise InvalidUriError('Attempt to get %s for "%s" failed.\nThis ' |
| 70 'can happen if the URI refers to a non-' |
| 71 'existent object or if you meant to\noperate ' |
| 72 'on a directory (e.g., leaving off -R option ' |
| 73 'on gsutil cp, mv, or ls of a\nbucket). If a ' |
| 74 'version-ful object was specified, you may ' |
| 75 'have neglected to\nuse a -v flag.' % |
| 76 (level, uri)) |
| 77 |
| 78 def _check_bucket_uri(self, function_name): |
| 79 if issubclass(type(self), BucketStorageUri) and not self.bucket_name: |
| 80 raise InvalidUriError( |
| 81 '%s on bucket-less URI (%s)' % (function_name, self.uri)) |
| 82 |
| 83 def _check_object_uri(self, function_name): |
| 84 if issubclass(type(self), BucketStorageUri) and not self.object_name: |
| 85 raise InvalidUriError('%s on object-less URI (%s)' % |
| 86 (function_name, self.uri)) |
| 87 |
| 88 def _warn_about_args(self, function_name, **args): |
| 89 for arg in args: |
| 90 if args[arg]: |
| 91 sys.stderr.write( |
| 92 'Warning: %s ignores argument: %s=%s\n' % |
| 93 (function_name, arg, str(args[arg]))) |
| 94 |
| 95 def connect(self, access_key_id=None, secret_access_key=None, **kwargs): |
| 96         """
| 97         Opens a connection to the appropriate provider, depending on the
| 98         provider portion of the URI. Requires credentials defined in the
| 99         boto config file (see boto/pyami/config.py).
| 100         @type access_key_id: string
| 101         @param access_key_id: Optional provider access key ID.
| 102         @rtype: L{AWSAuthConnection<boto.connection.AWSAuthConnection>}
| 103         @return: A connection to the storage service provider of this URI.
| 104         """
| 105 connection_args = dict(self.connection_args or ()) |
| 106 # Use OrdinaryCallingFormat instead of boto-default |
| 107 # SubdomainCallingFormat because the latter changes the hostname |
| 108 # that's checked during cert validation for HTTPS connections, |
| 109 # which will fail cert validation (when cert validation is enabled). |
| 110 # Note: the following import can't be moved up to the start of |
| 111 # this file else it causes a config import failure when run from |
| 112 # the resumable upload/download tests. |
| 113 from boto.s3.connection import OrdinaryCallingFormat |
| 114 connection_args['calling_format'] = OrdinaryCallingFormat() |
| 115 if (hasattr(self, 'suppress_consec_slashes') and |
| 116 'suppress_consec_slashes' not in connection_args): |
| 117 connection_args['suppress_consec_slashes'] = ( |
| 118 self.suppress_consec_slashes) |
| 119 connection_args.update(kwargs) |
| 120 if not self.connection: |
| 121 if self.scheme in self.provider_pool: |
| 122 self.connection = self.provider_pool[self.scheme] |
| 123 elif self.scheme == 's3': |
| 124 from boto.s3.connection import S3Connection |
| 125 self.connection = S3Connection(access_key_id, |
| 126 secret_access_key, |
| 127 **connection_args) |
| 128 self.provider_pool[self.scheme] = self.connection |
| 129 elif self.scheme == 'gs': |
| 130 from boto.gs.connection import GSConnection |
| 131 self.connection = GSConnection(access_key_id, |
| 132 secret_access_key, |
| 133 **connection_args) |
| 134 self.provider_pool[self.scheme] = self.connection |
| 135 elif self.scheme == 'file': |
| 136 from boto.file.connection import FileConnection |
| 137 self.connection = FileConnection(self) |
| 138 else: |
| 139 raise InvalidUriError('Unrecognized scheme "%s"' % |
| 140 self.scheme) |
| 141 self.connection.debug = self.debug |
| 142 return self.connection |
| 143 |
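As a usage sketch (the bucket and object names below are placeholders, and
credentials for the provider are assumed to be present in the boto config
file), connect() is usually invoked indirectly by methods such as
get_bucket() on a URI built with boto.storage_uri(); repeated calls for the
same scheme reuse the pooled connection:

    import boto

    uri = boto.storage_uri('gs://my-bucket/my-object')  # placeholder names
    conn = uri.connect()  # builds a GSConnection and caches it per scheme
    assert uri.connect() is conn  # later calls return the cached connection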
| 144 def delete_key(self, validate=False, headers=None, version_id=None, |
| 145 mfa_token=None): |
| 146 self._check_object_uri('delete_key') |
| 147 bucket = self.get_bucket(validate, headers) |
| 148 return bucket.delete_key(self.object_name, headers, version_id, |
| 149 mfa_token) |
| 150 |
| 151 def list_bucket(self, prefix='', delimiter='', headers=None, |
| 152 all_versions=False): |
| 153 self._check_bucket_uri('list_bucket') |
| 154 bucket = self.get_bucket(headers=headers) |
| 155 if all_versions: |
| 156 return (v for v in bucket.list_versions( |
| 157 prefix=prefix, delimiter=delimiter, headers=headers) |
| 158 if not isinstance(v, DeleteMarker)) |
| 159 else: |
| 160 return bucket.list(prefix=prefix, delimiter=delimiter, |
| 161 headers=headers) |
| 162 |
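For illustration (placeholder bucket and prefix), list_bucket() iterates keys
under a prefix, and with all_versions=True returns a generator over every
live version with DeleteMarker entries filtered out:

    import boto

    bucket_uri = boto.storage_uri('s3://my-bucket')  # placeholder bucket
    for entry in bucket_uri.list_bucket(prefix='logs/', delimiter='/'):
        print(entry.name)  # Key or Prefix objects; both expose .name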
| 163 def get_all_keys(self, validate=False, headers=None, prefix=None): |
| 164 bucket = self.get_bucket(validate, headers) |
| 165 return bucket.get_all_keys(headers) |
| 166 |
| 167 def get_bucket(self, validate=False, headers=None): |
| 168 self._check_bucket_uri('get_bucket') |
| 169 conn = self.connect() |
| 170 bucket = conn.get_bucket(self.bucket_name, validate, headers) |
| 171 self.check_response(bucket, 'bucket', self.uri) |
| 172 return bucket |
| 173 |
| 174 def get_key(self, validate=False, headers=None, version_id=None): |
| 175 self._check_object_uri('get_key') |
| 176 bucket = self.get_bucket(validate, headers) |
| 177 key = bucket.get_key(self.object_name, headers, version_id) |
| 178 self.check_response(key, 'key', self.uri) |
| 179 return key |
| 180 |
| 181 def new_key(self, validate=False, headers=None): |
| 182 self._check_object_uri('new_key') |
| 183 bucket = self.get_bucket(validate, headers) |
| 184 return bucket.new_key(self.object_name) |
| 185 |
| 186 def get_contents_to_stream(self, fp, headers=None, version_id=None): |
| 187         self._check_object_uri('get_contents_to_stream')
| 188         self._warn_about_args('get_contents_to_stream', validate=False)
| 189 key = self.get_key(None, headers) |
| 190 self.check_response(key, 'key', self.uri) |
| 191 return key.get_contents_to_file(fp, headers, version_id=version_id) |
| 192 |
| 193 def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10, |
| 194 torrent=False, version_id=None, |
| 195 res_download_handler=None, response_headers=None): |
| 196 self._check_object_uri('get_contents_to_file') |
| 197 key = self.get_key(None, headers) |
| 198 self.check_response(key, 'key', self.uri) |
| 199 key.get_contents_to_file(fp, headers, cb, num_cb, torrent, version_id, |
| 200 res_download_handler, response_headers) |
| 201 |
| 202 def get_contents_as_string(self, validate=False, headers=None, cb=None, |
| 203 num_cb=10, torrent=False, version_id=None): |
| 204 self._check_object_uri('get_contents_as_string') |
| 205 key = self.get_key(validate, headers) |
| 206 self.check_response(key, 'key', self.uri) |
| 207 return key.get_contents_as_string(headers, cb, num_cb, torrent, |
| 208 version_id) |
| 209 |
| 210 def acl_class(self): |
| 211 conn = self.connect() |
| 212 acl_class = conn.provider.acl_class |
| 213 self.check_response(acl_class, 'acl_class', self.uri) |
| 214 return acl_class |
| 215 |
| 216 def canned_acls(self): |
| 217 conn = self.connect() |
| 218 canned_acls = conn.provider.canned_acls |
| 219 self.check_response(canned_acls, 'canned_acls', self.uri) |
| 220 return canned_acls |
| 221 |
| 222 |
| 223 class BucketStorageUri(StorageUri): |
| 224 """ |
| 225 StorageUri subclass that handles bucket storage providers. |
| 226 Callers should instantiate this class by calling boto.storage_uri(). |
| 227 """ |
| 228 |
| 229 delim = '/' |
| 230 capabilities = set([]) # A set of additional capabilities. |
| 231 |
| 232 def __init__(self, scheme, bucket_name=None, object_name=None, |
| 233 debug=0, connection_args=None, suppress_consec_slashes=True, |
| 234 version_id=None, generation=None, meta_generation=None): |
| 235         """Instantiate a BucketStorageUri from a scheme, bucket, and object.
| 236 |
| 237 @type scheme: string |
| 238 @param scheme: URI scheme naming the storage provider (gs, s3, etc.) |
| 239 @type bucket_name: string |
| 240 @param bucket_name: bucket name |
| 241 @type object_name: string |
| 242 @param object_name: object name |
| 243 @type debug: int |
| 244 @param debug: debug level to pass in to connection (range 0..2) |
| 245 @type connection_args: map |
| 246 @param connection_args: optional map containing args to be |
| 247 passed to {S3,GS}Connection constructor (e.g., to override |
| 248 https_connection_factory). |
| 249 @param suppress_consec_slashes: If provided, controls whether |
| 250 consecutive slashes will be suppressed in key paths. |
| 251 @param version_id: Object version id (S3-specific). |
| 252 @param generation: Object generation number (GCS-specific). |
| 253 @param meta_generation: Object meta-generation number (GCS-specific). |
| 254 |
| 255 After instantiation the components are available in the following |
| 256 fields: uri, scheme, bucket_name, object_name. |
| 257 """ |
| 258 |
| 259 self.scheme = scheme |
| 260 self.bucket_name = bucket_name |
| 261 self.object_name = object_name |
| 262 if connection_args: |
| 263 self.connection_args = connection_args |
| 264 self.suppress_consec_slashes = suppress_consec_slashes |
| 265 if self.bucket_name and self.object_name: |
| 266 self.uri = ('%s://%s/%s' % (self.scheme, self.bucket_name, |
| 267 self.object_name)) |
| 268 elif self.bucket_name: |
| 269 self.uri = ('%s://%s/' % (self.scheme, self.bucket_name)) |
| 270 else: |
| 271 self.uri = ('%s://' % self.scheme) |
| 272 self.debug = debug |
| 273 |
| 274 self.version_id = version_id |
| 275 self.generation = generation and int(generation) |
| 276 self.meta_generation = meta_generation and int(meta_generation) |
| 277 |
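A brief sketch of how the constructor arguments map onto the uri field
(values are illustrative, and the class is constructed directly here only to
show __init__; callers should normally go through boto.storage_uri()):

    BucketStorageUri('gs', bucket_name='my-bucket',
                     object_name='a/b.txt').uri          # 'gs://my-bucket/a/b.txt'
    BucketStorageUri('gs', bucket_name='my-bucket').uri  # 'gs://my-bucket/'
    BucketStorageUri('s3').uri                           # 's3://'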
| 278 def get_key(self, validate=False, headers=None, version_id=None): |
| 279 self._check_object_uri('get_key') |
| 280 bucket = self.get_bucket(validate, headers) |
| 281 if self.get_provider().name == 'aws': |
| 282 key = bucket.get_key(self.object_name, headers, |
| 283 version_id=(version_id or self.version_id)) |
| 284 elif self.get_provider().name == 'google': |
| 285 key = bucket.get_key(self.object_name, headers, |
| 286 generation=self.generation) |
| 287 self.check_response(key, 'key', self.uri) |
| 288 return key |
| 289 |
| 290 def delete_key(self, validate=False, headers=None, version_id=None, |
| 291 mfa_token=None): |
| 292 self._check_object_uri('delete_key') |
| 293 bucket = self.get_bucket(validate, headers) |
| 294 if self.get_provider().name == 'aws': |
| 295 version_id = version_id or self.version_id |
| 296 return bucket.delete_key(self.object_name, headers, version_id, |
| 297 mfa_token) |
| 298 elif self.get_provider().name == 'google': |
| 299 return bucket.delete_key(self.object_name, headers, |
| 300 generation=self.generation) |
| 301 |
| 302 def clone_replace_name(self, new_name): |
| 303 """Instantiate a BucketStorageUri from the current BucketStorageUri, |
| 304 but replacing the object_name. |
| 305 |
| 306 @type new_name: string |
| 307 @param new_name: new object name |
| 308 """ |
| 309 self._check_bucket_uri('clone_replace_name') |
| 310 return BucketStorageUri( |
| 311 self.scheme, bucket_name=self.bucket_name, object_name=new_name, |
| 312 debug=self.debug, |
| 313 suppress_consec_slashes=self.suppress_consec_slashes) |
| 314 |
| 315 def get_acl(self, validate=False, headers=None, version_id=None): |
| 316         """returns the ACL of the bucket or object named by this URI"""
| 317 self._check_bucket_uri('get_acl') |
| 318 bucket = self.get_bucket(validate, headers) |
| 319 # This works for both bucket- and object- level ACLs (former passes |
| 320 # key_name=None): |
| 321 key_name = self.object_name or '' |
| 322 if self.get_provider().name == 'aws': |
| 323 version_id = version_id or self.version_id |
| 324 acl = bucket.get_acl(key_name, headers, version_id) |
| 325 else: |
| 326 acl = bucket.get_acl(key_name, headers, generation=self.generation) |
| 327 self.check_response(acl, 'acl', self.uri) |
| 328 return acl |
| 329 |
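A hedged sketch (placeholder names, GCS credentials assumed configured) of
reading ACLs at bucket and object level through the same method:

    import boto

    bucket_acl = boto.storage_uri('gs://my-bucket').get_acl()
    object_acl = boto.storage_uri('gs://my-bucket/obj.txt').get_acl()
    # Both are provider ACL objects; e.g. bucket_acl.to_xml() serializes them.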
| 330 def get_def_acl(self, validate=False, headers=None): |
| 331 """returns a bucket's default object acl""" |
| 332 self._check_bucket_uri('get_def_acl') |
| 333 bucket = self.get_bucket(validate, headers) |
| 334 # This works for both bucket- and object- level ACLs (former passes |
| 335 # key_name=None): |
| 336 acl = bucket.get_def_acl('', headers) |
| 337 self.check_response(acl, 'acl', self.uri) |
| 338 return acl |
| 339 |
| 340 def get_cors(self, validate=False, headers=None): |
| 341 """returns a bucket's CORS XML""" |
| 342 self._check_bucket_uri('get_cors') |
| 343 bucket = self.get_bucket(validate, headers) |
| 344 cors = bucket.get_cors(headers) |
| 345 self.check_response(cors, 'cors', self.uri) |
| 346 return cors |
| 347 |
| 348 def set_cors(self, cors, validate=False, headers=None): |
| 349 """sets or updates a bucket's CORS XML""" |
| 350         self._check_bucket_uri('set_cors')
| 351 bucket = self.get_bucket(validate, headers) |
| 352 bucket.set_cors(cors.to_xml(), headers) |
| 353 |
| 354 def get_location(self, validate=False, headers=None): |
| 355 self._check_bucket_uri('get_location') |
| 356 bucket = self.get_bucket(validate, headers) |
| 357 return bucket.get_location() |
| 358 |
| 359 def get_storage_class(self, validate=False, headers=None): |
| 360 self._check_bucket_uri('get_storage_class') |
| 361 # StorageClass is defined as a bucket param for GCS, but as a key |
| 362 # param for S3. |
| 363 if self.scheme != 'gs': |
| 364 raise ValueError('get_storage_class() not supported for %s ' |
| 365 'URIs.' % self.scheme) |
| 366 bucket = self.get_bucket(validate, headers) |
| 367 return bucket.get_storage_class() |
| 368 |
| 369 def get_subresource(self, subresource, validate=False, headers=None, |
| 370 version_id=None): |
| 371 self._check_bucket_uri('get_subresource') |
| 372 bucket = self.get_bucket(validate, headers) |
| 373 return bucket.get_subresource(subresource, self.object_name, headers, |
| 374 version_id) |
| 375 |
| 376 def add_group_email_grant(self, permission, email_address, recursive=False, |
| 377 validate=False, headers=None): |
| 378 self._check_bucket_uri('add_group_email_grant') |
| 379 if self.scheme != 'gs': |
| 380 raise ValueError('add_group_email_grant() not supported for %s ' |
| 381 'URIs.' % self.scheme) |
| 382 if self.object_name: |
| 383 if recursive: |
| 384 raise ValueError('add_group_email_grant() on key-ful URI cannot ' |
| 385 'specify recursive=True') |
| 386 key = self.get_key(validate, headers) |
| 387 self.check_response(key, 'key', self.uri) |
| 388 key.add_group_email_grant(permission, email_address, headers) |
| 389 elif self.bucket_name: |
| 390 bucket = self.get_bucket(validate, headers) |
| 391 bucket.add_group_email_grant(permission, email_address, recursive, |
| 392 headers) |
| 393 else: |
| 394 raise InvalidUriError('add_group_email_grant() on bucket-less URI ' |
| 395 '%s' % self.uri) |
| 396 |
| 397 def add_email_grant(self, permission, email_address, recursive=False, |
| 398 validate=False, headers=None): |
| 399 self._check_bucket_uri('add_email_grant') |
| 400 if not self.object_name: |
| 401 bucket = self.get_bucket(validate, headers) |
| 402 bucket.add_email_grant(permission, email_address, recursive, |
| 403 headers) |
| 404 else: |
| 405 key = self.get_key(validate, headers) |
| 406 self.check_response(key, 'key', self.uri) |
| 407 key.add_email_grant(permission, email_address) |
| 408 |
| 409 def add_user_grant(self, permission, user_id, recursive=False, |
| 410 validate=False, headers=None): |
| 411 self._check_bucket_uri('add_user_grant') |
| 412 if not self.object_name: |
| 413 bucket = self.get_bucket(validate, headers) |
| 414 bucket.add_user_grant(permission, user_id, recursive, headers) |
| 415 else: |
| 416 key = self.get_key(validate, headers) |
| 417 self.check_response(key, 'key', self.uri) |
| 418 key.add_user_grant(permission, user_id) |
| 419 |
| 420 def list_grants(self, headers=None): |
| 421         self._check_bucket_uri('list_grants')
| 422         bucket = self.get_bucket(headers=headers)
| 423 return bucket.list_grants(headers) |
| 424 |
| 425 def is_file_uri(self): |
| 426 """Returns True if this URI names a file or directory.""" |
| 427 return False |
| 428 |
| 429 def is_cloud_uri(self): |
| 430 """Returns True if this URI names a bucket or object.""" |
| 431 return True |
| 432 |
| 433 def names_container(self): |
| 434 """ |
| 435         Returns True if this URI names a directory or bucket. Returns
| 436         False for bucket subdirectories; bucket subdirectory semantics must
| 437         be provided by the caller (as gsutil does).
| 438 """ |
| 439 return bool(not self.object_name) |
| 440 |
| 441 def names_singleton(self): |
| 442 """Returns True if this URI names a file or object.""" |
| 443 return bool(self.object_name) |
| 444 |
| 445 def names_directory(self): |
| 446 """Returns True if this URI names a directory.""" |
| 447 return False |
| 448 |
| 449 def names_provider(self): |
| 450 """Returns True if this URI names a provider.""" |
| 451 return bool(not self.bucket_name) |
| 452 |
| 453 def names_bucket(self): |
| 454 """Returns True if this URI names a bucket.""" |
| 455 return self.names_container() |
| 456 |
| 457 def names_file(self): |
| 458 """Returns True if this URI names a file.""" |
| 459 return False |
| 460 |
| 461 def names_object(self): |
| 462 """Returns True if this URI names an object.""" |
| 463 return self.names_singleton() |
| 464 |
| 465 def is_stream(self): |
| 466         """Returns True if this URI represents an input/output stream."""
| 467 return False |
| 468 |
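To make the predicate semantics concrete (placeholder names; none of these
calls touch the network):

    import boto

    boto.storage_uri('gs://my-bucket').names_container()       # True
    boto.storage_uri('gs://my-bucket').names_singleton()       # False
    boto.storage_uri('gs://my-bucket/obj.txt').names_object()  # True
    boto.storage_uri('gs://').names_provider()                 # True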
| 469 def create_bucket(self, headers=None, location='', policy=None, |
| 470 storage_class=None): |
| 471         self._check_bucket_uri('create_bucket')
| 472 conn = self.connect() |
| 473 # Pass storage_class param only if this is a GCS bucket. (In S3 the |
| 474 # storage class is specified on the key object.) |
| 475 if self.scheme == 'gs': |
| 476 return conn.create_bucket(self.bucket_name, headers, location, policy, |
| 477 storage_class) |
| 478 else: |
| 479 return conn.create_bucket(self.bucket_name, headers, location, policy) |
| 480 |
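A minimal sketch (the bucket name is a placeholder and must be globally
unique; provider credentials assumed configured) of creating and removing a
bucket through a provider-agnostic URI:

    import boto

    new_uri = boto.storage_uri('gs://some-unique-bucket-name')  # placeholder
    new_uri.create_bucket()   # for gs URIs, storage_class is also forwarded
    new_uri.delete_bucket()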
| 481 def delete_bucket(self, headers=None): |
| 482 self._check_bucket_uri('delete_bucket') |
| 483 conn = self.connect() |
| 484 return conn.delete_bucket(self.bucket_name, headers) |
| 485 |
| 486 def get_all_buckets(self, headers=None): |
| 487 conn = self.connect() |
| 488 return conn.get_all_buckets(headers) |
| 489 |
| 490 def get_provider(self): |
| 491 conn = self.connect() |
| 492 provider = conn.provider |
| 493 self.check_response(provider, 'provider', self.uri) |
| 494 return provider |
| 495 |
| 496 def set_acl(self, acl_or_str, key_name='', validate=False, headers=None, |
| 497 version_id=None): |
| 498         """sets or updates the ACL of this bucket or object"""
| 499 self._check_bucket_uri('set_acl') |
| 500 key_name = key_name or self.object_name or '' |
| 501 bucket = self.get_bucket(validate, headers) |
| 502 if self.generation: |
| 503 bucket.set_acl( |
| 504 acl_or_str, key_name, headers, generation=self.generation) |
| 505 else: |
| 506 version_id = version_id or self.version_id |
| 507 bucket.set_acl(acl_or_str, key_name, headers, version_id) |
| 508 |
| 509 def set_def_acl(self, acl_or_str, key_name='', validate=False, |
| 510 headers=None, version_id=None): |
| 511 """sets or updates a bucket's default object acl""" |
| 512 self._check_bucket_uri('set_def_acl') |
| 513 self.get_bucket(validate, headers).set_def_acl(acl_or_str, '', headers) |
| 514 |
| 515 def set_canned_acl(self, acl_str, validate=False, headers=None, |
| 516 version_id=None): |
| 517         """sets or updates the object's ACL to a predefined (canned) value"""
| 518 self._check_object_uri('set_canned_acl') |
| 519 self._warn_about_args('set_canned_acl', version_id=version_id) |
| 520 key = self.get_key(validate, headers) |
| 521 self.check_response(key, 'key', self.uri) |
| 522 key.set_canned_acl(acl_str, headers) |
| 523 |
| 524 def set_def_canned_acl(self, acl_str, validate=False, headers=None, |
| 525 version_id=None): |
| 526 """sets or updates a bucket's default object acl to a predefined |
| 527 (canned) value""" |
| 528         self._check_bucket_uri('set_def_canned_acl')
| 529 key = self.get_key(validate, headers) |
| 530 self.check_response(key, 'key', self.uri) |
| 531 key.set_def_canned_acl(acl_str, headers, version_id) |
| 532 |
| 533 def set_subresource(self, subresource, value, validate=False, headers=None, |
| 534 version_id=None): |
| 535 self._check_bucket_uri('set_subresource') |
| 536 bucket = self.get_bucket(validate, headers) |
| 537 bucket.set_subresource(subresource, value, self.object_name, headers, |
| 538 version_id) |
| 539 |
| 540 def set_contents_from_string(self, s, headers=None, replace=True, |
| 541 cb=None, num_cb=10, policy=None, md5=None, |
| 542 reduced_redundancy=False): |
| 543 self._check_object_uri('set_contents_from_string') |
| 544 key = self.new_key(headers=headers) |
| 545 if self.scheme == 'gs': |
| 546 if reduced_redundancy: |
| 547                 sys.stderr.write('Warning: GCS does not support '
| 548                                  'reduced_redundancy; argument ignored by '
| 549                                  'set_contents_from_string\n')
| 550 key.set_contents_from_string(s, headers, replace, cb, num_cb, |
| 551 policy, md5) |
| 552 else: |
| 553 key.set_contents_from_string(s, headers, replace, cb, num_cb, |
| 554 policy, md5, reduced_redundancy) |
| 555 |
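A hedged round-trip sketch (placeholder names; the bucket is assumed to exist
and credentials to be configured):

    import boto

    obj_uri = boto.storage_uri('gs://my-bucket/hello.txt')  # placeholder names
    obj_uri.set_contents_from_string('hello world')
    obj_uri.get_contents_as_string()  # returns 'hello world'
    obj_uri.delete_key()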
| 556 def set_contents_from_file(self, fp, headers=None, replace=True, cb=None, |
| 557 num_cb=10, policy=None, md5=None, size=None, |
| 558 rewind=False, res_upload_handler=None): |
| 559 self._check_object_uri('set_contents_from_file') |
| 560 key = self.new_key(headers=headers) |
| 561 if self.scheme == 'gs': |
| 562 return key.set_contents_from_file( |
| 563 fp, headers, replace, cb, num_cb, policy, md5, size=size, |
| 564 rewind=rewind, res_upload_handler=res_upload_handler) |
| 565 else: |
| 566 self._warn_about_args('set_contents_from_file', |
| 567 res_upload_handler=res_upload_handler) |
| 568 return key.set_contents_from_file(fp, headers, replace, cb, num_cb, |
| 569 policy, md5, size=size, |
| 570 rewind=rewind) |
| 571 |
| 572 def set_contents_from_stream(self, fp, headers=None, replace=True, cb=None, |
| 573 policy=None, reduced_redundancy=False): |
| 574 self._check_object_uri('set_contents_from_stream') |
| 575 dst_key = self.new_key(False, headers) |
| 576 dst_key.set_contents_from_stream(fp, headers, replace, cb, |
| 577 policy=policy, |
| 578 reduced_redundancy=reduced_redundancy) |
| 579 |
| 580 def copy_key(self, src_bucket_name, src_key_name, metadata=None, |
| 581 src_version_id=None, storage_class='STANDARD', |
| 582 preserve_acl=False, encrypt_key=False, headers=None, |
| 583 query_args=None, src_generation=None): |
| 584 self._check_object_uri('copy_key') |
| 585 dst_bucket = self.get_bucket(validate=False, headers=headers) |
| 586 if src_generation: |
| 587 dst_bucket.copy_key(new_key_name=self.object_name, |
| 588 src_bucket_name=src_bucket_name, |
| 589 src_key_name=src_key_name, metadata=metadata, |
| 590 storage_class=storage_class, |
| 591 preserve_acl=preserve_acl, |
| 592 encrypt_key=encrypt_key, |
| 593 headers=headers, query_args=query_args, |
| 594 src_generation=src_generation) |
| 595 else: |
| 596 dst_bucket.copy_key(new_key_name=self.object_name, |
| 597 src_bucket_name=src_bucket_name, |
| 598 src_key_name=src_key_name, metadata=metadata, |
| 599 src_version_id=src_version_id, |
| 600 storage_class=storage_class, |
| 601 preserve_acl=preserve_acl, |
| 602 encrypt_key=encrypt_key, |
| 603 headers=headers, query_args=query_args) |
| 604 |
| 605 def enable_logging(self, target_bucket, target_prefix=None, validate=False, |
| 606 headers=None, version_id=None): |
| 607 self._check_bucket_uri('enable_logging') |
| 608 bucket = self.get_bucket(validate, headers) |
| 609 bucket.enable_logging(target_bucket, target_prefix, headers=headers) |
| 610 |
| 611 def disable_logging(self, validate=False, headers=None, version_id=None): |
| 612 self._check_bucket_uri('disable_logging') |
| 613 bucket = self.get_bucket(validate, headers) |
| 614 bucket.disable_logging(headers=headers) |
| 615 |
| 616 def set_website_config(self, main_page_suffix=None, error_key=None, |
| 617 validate=False, headers=None): |
| 618 bucket = self.get_bucket(validate, headers) |
| 619 if not (main_page_suffix or error_key): |
| 620 bucket.delete_website_configuration(headers) |
| 621 else: |
| 622 bucket.configure_website(main_page_suffix, error_key, headers) |
| 623 |
| 624 def get_website_config(self, validate=False, headers=None): |
| 625 bucket = self.get_bucket(validate, headers) |
| 626 return bucket.get_website_configuration_with_xml(headers) |
| 627 |
| 628 def get_versioning_config(self, headers=None): |
| 629 bucket = self.get_bucket(False, headers) |
| 630 return bucket.get_versioning_status(headers) |
| 631 |
| 632 def configure_versioning(self, enabled, headers=None): |
| 633 self._check_bucket_uri('configure_versioning') |
| 634 bucket = self.get_bucket(False, headers) |
| 635 return bucket.configure_versioning(enabled, headers) |
| 636 |
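A sketch (placeholder bucket, S3 credentials assumed configured) of toggling
and inspecting versioning through these helpers:

    import boto

    bucket_uri = boto.storage_uri('s3://my-bucket')  # placeholder bucket
    bucket_uri.configure_versioning(True)
    bucket_uri.get_versioning_config()  # e.g. {'Versioning': 'Enabled'}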
| 637 def set_metadata(self, metadata_plus, metadata_minus, preserve_acl, |
| 638 headers=None): |
| 639 return self.get_key(False).set_remote_metadata(metadata_plus, |
| 640 metadata_minus, |
| 641 preserve_acl, |
| 642 headers=headers) |
| 643 |
| 644 def exists(self, headers=None): |
| 645 """Returns True if the object exists or False if it doesn't""" |
| 646 if not self.object_name: |
| 647 raise InvalidUriError('exists on object-less URI (%s)' % self.uri) |
| 648 bucket = self.get_bucket() |
| 649 key = bucket.get_key(self.object_name, headers=headers) |
| 650 return bool(key) |
| 651 |
| 652 class FileStorageUri(StorageUri): |
| 653 """ |
| 654 StorageUri subclass that handles files in the local file system. |
| 655 Callers should instantiate this class by calling boto.storage_uri(). |
| 656 |
| 657 See file/README about how we map StorageUri operations onto a file system. |
| 658 """ |
| 659 |
| 660 delim = os.sep |
| 661 |
| 662 def __init__(self, object_name, debug, is_stream=False): |
| 663 """Instantiate a FileStorageUri from a path name. |
| 664 |
| 665 @type object_name: string |
| 666 @param object_name: object name |
| 667 @type debug: boolean |
| 668 @param debug: whether to enable debugging on this StorageUri |
| 669 |
| 670 After instantiation the components are available in the following |
| 671 fields: uri, scheme, bucket_name (always blank for this "anonymous" |
| 672 bucket), object_name. |
| 673 """ |
| 674 |
| 675 self.scheme = 'file' |
| 676 self.bucket_name = '' |
| 677 self.object_name = object_name |
| 678 self.uri = 'file://' + object_name |
| 679 self.debug = debug |
| 680 self.stream = is_stream |
| 681 |
| 682 def clone_replace_name(self, new_name): |
| 683 """Instantiate a FileStorageUri from the current FileStorageUri, |
| 684 but replacing the object_name. |
| 685 |
| 686 @type new_name: string |
| 687 @param new_name: new object name |
| 688 """ |
| 689 return FileStorageUri(new_name, self.debug, self.stream) |
| 690 |
| 691 def is_file_uri(self): |
| 692 """Returns True if this URI names a file or directory.""" |
| 693 return True |
| 694 |
| 695 def is_cloud_uri(self): |
| 696 """Returns True if this URI names a bucket or object.""" |
| 697 return False |
| 698 |
| 699 def names_container(self): |
| 700 """Returns True if this URI names a directory or bucket.""" |
| 701 return self.names_directory() |
| 702 |
| 703 def names_singleton(self): |
| 704 """Returns True if this URI names a file (or stream) or object.""" |
| 705 return not self.names_container() |
| 706 |
| 707 def names_directory(self): |
| 708 """Returns True if this URI names a directory.""" |
| 709 if self.stream: |
| 710 return False |
| 711 return os.path.isdir(self.object_name) |
| 712 |
| 713 def names_provider(self): |
| 714 """Returns True if this URI names a provider.""" |
| 715 return False |
| 716 |
| 717 def names_bucket(self): |
| 718 """Returns True if this URI names a bucket.""" |
| 719 return False |
| 720 |
| 721 def names_file(self): |
| 722 """Returns True if this URI names a file.""" |
| 723 return self.names_singleton() |
| 724 |
| 725 def names_object(self): |
| 726 """Returns True if this URI names an object.""" |
| 727 return False |
| 728 |
| 729 def is_stream(self): |
| 730         """Returns True if this URI represents an input/output stream.
| 731         """
| 732 return bool(self.stream) |
| 733 |
| 734 def close(self): |
| 735 """Closes the underlying file. |
| 736 """ |
| 737 self.get_key().close() |
| 738 |
| 739 def exists(self, _headers_not_used=None): |
| 740 """Returns True if the file exists or False if it doesn't""" |
| 741 # The _headers_not_used parameter is ignored. It is only there to ensure |
| 742 # that this method's signature is identical to the exists method on the |
| 743 # BucketStorageUri class. |
| 744 return os.path.exists(self.object_name) |
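For completeness, a small sketch of the file:// flavor (the path is a
placeholder); none of the predicates below require network access:

    import boto

    furi = boto.storage_uri('file:///tmp/example.txt')  # placeholder path
    furi.is_file_uri()   # True
    furi.is_cloud_uri()  # False
    furi.names_file()    # True unless the path names a directory
    furi.exists()        # same as os.path.exists('/tmp/example.txt')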