| OLD | NEW |
| 1 # -*- coding: utf-8 -*- |
| 2 # Copyright 2014 Google Inc. All Rights Reserved. |
| 3 # |
| 4 # Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 # you may not use this file except in compliance with the License. |
| 6 # You may obtain a copy of the License at |
| 7 # |
| 8 # http://www.apache.org/licenses/LICENSE-2.0 |
| 9 # |
| 10 # Unless required by applicable law or agreed to in writing, software |
| 11 # distributed under the License is distributed on an "AS IS" BASIS, |
| 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 # See the License for the specific language governing permissions and |
| 14 # limitations under the License. |
| 15 """JSON gsutil Cloud API implementation for Google Cloud Storage.""" |
| 16 |
| 17 from __future__ import absolute_import |
| 18 |
| 19 import httplib |
| 20 import json |
| 21 import socket |
| 22 import ssl |
| 23 import time |
| 24 |
| 25 import boto |
| 26 from boto import config |
| 27 from gcs_oauth2_boto_plugin import oauth2_helper |
| 28 import httplib2 |
| 29 from oauth2client import multistore_file |
| 30 |
| 31 from gslib.cloud_api import AccessDeniedException |
| 32 from gslib.cloud_api import ArgumentException |
| 33 from gslib.cloud_api import BadRequestException |
| 34 from gslib.cloud_api import CloudApi |
| 35 from gslib.cloud_api import NotEmptyException |
| 36 from gslib.cloud_api import NotFoundException |
| 37 from gslib.cloud_api import PreconditionException |
| 38 from gslib.cloud_api import Preconditions |
| 39 from gslib.cloud_api import ResumableDownloadException |
| 40 from gslib.cloud_api import ResumableUploadAbortException |
| 41 from gslib.cloud_api import ResumableUploadException |
| 42 from gslib.cloud_api import ServiceException |
| 43 from gslib.cloud_api_helper import ValidateDstObjectMetadata |
| 44 from gslib.cred_types import CredTypes |
| 45 from gslib.exception import CommandException |
| 46 from gslib.gcs_json_media import BytesTransferredContainer |
| 47 from gslib.gcs_json_media import DownloadCallbackConnectionClassFactory |
| 48 from gslib.gcs_json_media import HttpWithDownloadStream |
| 49 from gslib.gcs_json_media import UploadCallbackConnectionClassFactory |
| 50 from gslib.gcs_json_media import WrapDownloadHttpRequest |
| 51 from gslib.gcs_json_media import WrapUploadHttpRequest |
| 52 from gslib.no_op_credentials import NoOpCredentials |
| 53 from gslib.project_id import PopulateProjectId |
| 54 from gslib.third_party.storage_apitools import credentials_lib as credentials_lib |
| 55 from gslib.third_party.storage_apitools import encoding as encoding |
| 56 from gslib.third_party.storage_apitools import exceptions as apitools_exceptions |
| 57 from gslib.third_party.storage_apitools import http_wrapper as apitools_http_wrapper |
| 58 from gslib.third_party.storage_apitools import storage_v1_client as apitools_client |
| 59 from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages |
| 60 from gslib.third_party.storage_apitools import transfer as apitools_transfer |
| 61 from gslib.third_party.storage_apitools.util import CalculateWaitForRetry |
| 62 from gslib.translation_helper import CreateBucketNotFoundException |
| 63 from gslib.translation_helper import CreateObjectNotFoundException |
| 64 from gslib.translation_helper import DEFAULT_CONTENT_TYPE |
| 65 from gslib.translation_helper import REMOVE_CORS_CONFIG |
| 66 from gslib.util import GetCertsFile |
| 67 from gslib.util import GetCredentialStoreFilename |
| 68 from gslib.util import GetMaxRetryDelay |
| 69 from gslib.util import GetNewHttp |
| 70 from gslib.util import GetNumRetries |
| 71 |
| 72 |
| 73 # Implementation supports only 'gs' URLs, so provider is unused. |
| 74 # pylint: disable=unused-argument |
| 75 |
| 76 DEFAULT_GCS_JSON_VERSION = 'v1' |
| 77 |
| 78 NUM_BUCKETS_PER_LIST_PAGE = 100 |
| 79 NUM_OBJECTS_PER_LIST_PAGE = 500 |
| 80 |
| 81 |
| 82 # Resumable downloads and uploads make one HTTP call per chunk, and the |
| 83 # chunk size must be a multiple of 256 KiB. Overridable for testing. |
| 84 def _ResumableChunkSize(): |
| 85 chunk_size = config.getint('GSUtil', 'json_resumable_chunk_size', |
| 86 1024*1024*100L) |
| 87 if chunk_size == 0: |
| 88 chunk_size = 1024*256L |
| 89 elif chunk_size % (1024*256L) != 0: |
| 90 chunk_size += (1024*256L) - (chunk_size % (1024*256L)) |
| 91 return chunk_size |
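| # Example of the rounding above (values illustrative): a configured |
| # json_resumable_chunk_size of 1000000 bytes is not a multiple of 256 KiB |
| # (262144 bytes), so it is rounded up to 4 * 262144 = 1048576 bytes. |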
| 92 |
| 93 TRANSLATABLE_APITOOLS_EXCEPTIONS = (apitools_exceptions.HttpError, |
| 94 apitools_exceptions.TransferError, |
| 95 apitools_exceptions.TransferInvalidError) |
| 96 |
| 97 # TODO: Distribute these exceptions better through apitools and here. |
| 98 # Right now, apitools is configured not to handle any exceptions on |
| 99 # uploads/downloads. |
| 100 # oauth2_client tries to JSON-decode the response, which can result |
| 101 # in a ValueError if the response was invalid. Until that is fixed in |
| 102 # oauth2_client, we need to handle it here. |
| 103 HTTP_TRANSFER_EXCEPTIONS = (apitools_exceptions.TransferRetryError, |
| 104 apitools_exceptions.BadStatusCodeError, |
| 105 # TODO: Honor retry-after headers. |
| 106 apitools_exceptions.RetryAfterError, |
| 107 apitools_exceptions.RequestError, |
| 108 httplib.BadStatusLine, |
| 109 httplib.IncompleteRead, |
| 110 httplib.ResponseNotReady, |
| 111 httplib2.ServerNotFoundError, |
| 112 socket.error, |
| 113 socket.gaierror, |
| 114 socket.timeout, |
| 115 ssl.SSLError, |
| 116 ValueError) |
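| # For instance, a truncated token-refresh response can surface from |
| # oauth2client as ValueError('No JSON object could be decoded'), which is |
| # why ValueError is included in the tuple above. |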
| 117 |
| 118 |
| 119 _VALIDATE_CERTIFICATES_503_MESSAGE = ( |
| 120 """Service Unavailable. If you have recently changed |
| 121 https_validate_certificates from True to False in your boto configuration |
| 122 file, please delete any cached access tokens in your filesystem (at %s) |
| 123 and try again.""" % GetCredentialStoreFilename()) |
| 124 |
| 125 |
| 126 class GcsJsonApi(CloudApi): |
| 127 """Google Cloud Storage JSON implementation of gsutil Cloud API.""" |
| 128 |
| 129 def __init__(self, bucket_storage_uri_class, logger, provider=None, |
| 130 credentials=None, debug=0): |
| 131 """Performs necessary setup for interacting with Google Cloud Storage. |
| 132 |
| 133 Args: |
| 134 bucket_storage_uri_class: Unused. |
| 135 logger: logging.logger for outputting log messages. |
| 136 provider: Unused. This implementation supports only Google Cloud Storage. |
| 137 credentials: Credentials to be used for interacting with Google Cloud |
| 138 Storage. |
| 139 debug: Debug level for the API implementation (0..3). |
| 140 """ |
| 141 # TODO: Plumb host_header for perfdiag / test_perfdiag. |
| 142 # TODO: Add jitter to apitools' http_wrapper retry mechanism. |
| 143 super(GcsJsonApi, self).__init__(bucket_storage_uri_class, logger, |
| 144 provider='gs', debug=debug) |
| 145 no_op_credentials = False |
| 146 if not credentials: |
| 147 loaded_credentials = self._CheckAndGetCredentials(logger) |
| 148 |
| 149 if not loaded_credentials: |
| 150 loaded_credentials = NoOpCredentials() |
| 151 no_op_credentials = True |
| 152 else: |
| 153 if isinstance(credentials, NoOpCredentials): |
| 154 no_op_credentials = True |
| 155 |
| 156 self.credentials = credentials or loaded_credentials |
| 157 |
| 158 self.certs_file = GetCertsFile() |
| 159 |
| 160 self.http = GetNewHttp() |
| 161 self.http_base = 'https://' |
| 162 gs_json_host = config.get('Credentials', 'gs_json_host', None) |
| 163 self.host_base = gs_json_host or 'www.googleapis.com' |
| 164 |
| 165 if not gs_json_host: |
| 166 gs_host = config.get('Credentials', 'gs_host', None) |
| 167 if gs_host: |
| 168 raise ArgumentException( |
| 169 'JSON API is selected but gs_json_host is not configured, ' |
| 170 'while gs_host is configured to %s. Please also configure ' |
| 171 'gs_json_host and gs_json_port to match your desired endpoint.' |
| 172 % gs_host) |
| 173 |
| 174 gs_json_port = config.get('Credentials', 'gs_json_port', None) |
| 175 |
| 176 if not gs_json_port: |
| 177 gs_port = config.get('Credentials', 'gs_port', None) |
| 178 if gs_port: |
| 179 raise ArgumentException( |
| 180 'JSON API is selected but gs_json_port is not configured, ' |
| 181 'while gs_port is configured to %s. Please also configure ' |
| 182 'gs_json_host and gs_json_port to match your desired endpoint.' |
| 183 % gs_port) |
| 184 self.host_port = '' |
| 185 else: |
| 186 self.host_port = ':' + config.get('Credentials', 'gs_json_port') |
| 187 |
| 188 self.api_version = config.get('GSUtil', 'json_api_version', |
| 189 DEFAULT_GCS_JSON_VERSION) |
| 190 self.url_base = (self.http_base + self.host_base + self.host_port + '/' + |
| 191 'storage/' + self.api_version + '/') |
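| # Illustrative boto config for a custom endpoint (the host and port values |
| # here are examples only): |
| # [Credentials] |
| # gs_json_host = localhost |
| # gs_json_port = 8080 |
| # With the default json_api_version of 'v1', url_base then becomes |
| # https://localhost:8080/storage/v1/. |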
| 192 |
| 193 self.credentials.set_store( |
| 194 multistore_file.get_credential_storage_custom_string_key( |
| 195 GetCredentialStoreFilename(), self.api_version)) |
| 196 |
| 197 self.num_retries = GetNumRetries() |
| 198 |
| 199 log_request = (debug >= 3) |
| 200 log_response = (debug >= 3) |
| 201 |
| 202 self.api_client = apitools_client.StorageV1( |
| 203 url=self.url_base, http=self.http, log_request=log_request, |
| 204 log_response=log_response, credentials=self.credentials, |
| 205 version=self.api_version) |
| 206 |
| 207 if no_op_credentials: |
| 208 # This API key is not secret and is used to identify gsutil during |
| 209 # anonymous requests. |
| 210 self.api_client.AddGlobalParam('key', |
| 211 u'AIzaSyDnacJHrKma0048b13sh8cgxNUwulubmJM') |
| 212 |
| 213 def _CheckAndGetCredentials(self, logger): |
| 214 configured_cred_types = [] |
| 215 try: |
| 216 if self._HasOauth2UserAccountCreds(): |
| 217 configured_cred_types.append(CredTypes.OAUTH2_USER_ACCOUNT) |
| 218 if self._HasOauth2ServiceAccountCreds(): |
| 219 configured_cred_types.append(CredTypes.OAUTH2_SERVICE_ACCOUNT) |
| 220 if len(configured_cred_types) > 1: |
| 221 # We only allow one set of configured credentials. Otherwise, we're |
| 222 # choosing one arbitrarily, which can be very confusing to the user |
| 223 # (e.g., if only one is authorized to perform some action) and can |
| 224 # also mask errors. |
| 225 # Because boto merges config files, GCE credentials show up by default |
| 226 # for GCE VMs. We don't want to fail when a user creates a boto file |
| 227 # with their own credentials, so in this case we'll use the OAuth2 |
| 228 # user credentials. |
| 229 failed_cred_type = None |
| 230 raise CommandException( |
| 231 ('You have multiple types of configured credentials (%s), which is ' |
| 232 'not supported. For more help, see "gsutil help creds".') |
| 233 % configured_cred_types) |
| 234 |
| 235 failed_cred_type = CredTypes.OAUTH2_USER_ACCOUNT |
| 236 user_creds = self._GetOauth2UserAccountCreds() |
| 237 failed_cred_type = CredTypes.OAUTH2_SERVICE_ACCOUNT |
| 238 service_account_creds = self._GetOauth2ServiceAccountCreds() |
| 239 failed_cred_type = CredTypes.GCE |
| 240 gce_creds = self._GetGceCreds() |
| 241 return user_creds or service_account_creds or gce_creds |
| 242 except: # pylint: disable=bare-except |
| 243 |
| 244 # If we didn't actually try to authenticate because there were multiple |
| 245 # types of configured credentials, don't emit this warning. |
| 246 if failed_cred_type: |
| 247 logger.warn( |
| 248 'Your "%s" credentials are invalid. For more help, see ' |
| 249 '"gsutil help creds", or re-run the gsutil config command (see ' |
| 250 '"gsutil help config").', failed_cred_type) |
| 251 |
| 252 # If there's any set of configured credentials, we'll fail if they're |
| 253 # invalid, rather than silently falling back to anonymous config (as |
| 254 # boto does). That approach leads to much confusion if users don't |
| 255 # realize their credentials are invalid. |
| 256 raise |
| 257 |
| 258 def _HasOauth2ServiceAccountCreds(self): |
| 259 return (config.has_option('Credentials', 'gs_service_client_id') and |
| 260 config.has_option('Credentials', 'gs_service_key_file')) |
| 261 |
| 262 def _HasOauth2UserAccountCreds(self): |
| 263 return config.has_option('Credentials', 'gs_oauth2_refresh_token') |
| 264 |
| 265 def _HasGceCreds(self): |
| 266 return config.has_option('GoogleCompute', 'service_account') |
| 267 |
| 268 def _GetOauth2ServiceAccountCreds(self): |
| 269 if self._HasOauth2ServiceAccountCreds(): |
| 270 return oauth2_helper.OAuth2ClientFromBotoConfig( |
| 271 boto.config, |
| 272 cred_type=CredTypes.OAUTH2_SERVICE_ACCOUNT).GetCredentials() |
| 273 |
| 274 def _GetOauth2UserAccountCreds(self): |
| 275 if self._HasOauth2UserAccountCreds(): |
| 276 return oauth2_helper.OAuth2ClientFromBotoConfig( |
| 277 boto.config).GetCredentials() |
| 278 |
| 279 def _GetGceCreds(self): |
| 280 if self._HasGceCreds(): |
| 281 try: |
| 282 return credentials_lib.GceAssertionCredentials() |
| 283 except apitools_exceptions.ResourceUnavailableError, e: |
| 284 if 'service account' in str(e) and 'does not exist' in str(e): |
| 285 return None |
| 286 raise |
| 287 |
| 288 def _GetNewDownloadHttp(self, download_stream): |
| 289 return GetNewHttp(http_class=HttpWithDownloadStream, stream=download_stream) |
| 290 |
| 291 def GetBucket(self, bucket_name, provider=None, fields=None): |
| 292 """See CloudApi class for function doc strings.""" |
| 293 projection = (apitools_messages.StorageBucketsGetRequest |
| 294 .ProjectionValueValuesEnum.full) |
| 295 apitools_request = apitools_messages.StorageBucketsGetRequest( |
| 296 bucket=bucket_name, projection=projection) |
| 297 global_params = apitools_messages.StandardQueryParameters() |
| 298 if fields: |
| 299 global_params.fields = ','.join(set(fields)) |
| 300 |
| 301 # Here and in list buckets, we have no way of knowing |
| 302 # whether we requested a field and didn't get it because it didn't exist |
| 303 # or because we didn't have permission to access it. |
| 304 try: |
| 305 return self.api_client.buckets.Get(apitools_request, |
| 306 global_params=global_params) |
| 307 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 308 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) |
| 309 |
| 310 def PatchBucket(self, bucket_name, metadata, preconditions=None, |
| 311 provider=None, fields=None): |
| 312 """See CloudApi class for function doc strings.""" |
| 313 projection = (apitools_messages.StorageBucketsPatchRequest |
| 314 .ProjectionValueValuesEnum.full) |
| 315 bucket_metadata = metadata |
| 316 |
| 317 if not preconditions: |
| 318 preconditions = Preconditions() |
| 319 |
| 320 # For blank metadata objects, we need to explicitly call |
| 321 # them out to apitools so it will send/erase them. |
| 322 apitools_include_fields = [] |
| 323 for metadata_field in ('metadata', 'lifecycle', 'logging', 'versioning', |
| 324 'website'): |
| 325 attr = getattr(bucket_metadata, metadata_field, None) |
| 326 if attr and not encoding.MessageToDict(attr): |
| 327 setattr(bucket_metadata, metadata_field, None) |
| 328 apitools_include_fields.append(metadata_field) |
| 329 |
| 330 if bucket_metadata.cors and bucket_metadata.cors == REMOVE_CORS_CONFIG: |
| 331 bucket_metadata.cors = [] |
| 332 apitools_include_fields.append('cors') |
| 333 |
| 334 apitools_request = apitools_messages.StorageBucketsPatchRequest( |
| 335 bucket=bucket_name, bucketResource=bucket_metadata, |
| 336 projection=projection, |
| 337 ifMetagenerationMatch=preconditions.meta_gen_match) |
| 338 global_params = apitools_messages.StandardQueryParameters() |
| 339 if fields: |
| 340 global_params.fields = ','.join(set(fields)) |
| 341 with self.api_client.IncludeFields(apitools_include_fields): |
| 342 try: |
| 343 return self.api_client.buckets.Patch(apitools_request, |
| 344 global_params=global_params) |
| 345 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 346 self._TranslateExceptionAndRaise(e) |
| 347 |
| 348 def CreateBucket(self, bucket_name, project_id=None, metadata=None, |
| 349 provider=None, fields=None): |
| 350 """See CloudApi class for function doc strings.""" |
| 351 projection = (apitools_messages.StorageBucketsInsertRequest |
| 352 .ProjectionValueValuesEnum.full) |
| 353 if not metadata: |
| 354 metadata = apitools_messages.Bucket() |
| 355 metadata.name = bucket_name |
| 356 |
| 357 if metadata.location: |
| 358 metadata.location = metadata.location.upper() |
| 359 if metadata.storageClass: |
| 360 metadata.storageClass = metadata.storageClass.upper() |
| 361 |
| 362 project_id = PopulateProjectId(project_id) |
| 363 |
| 364 apitools_request = apitools_messages.StorageBucketsInsertRequest( |
| 365 bucket=metadata, project=project_id, projection=projection) |
| 366 global_params = apitools_messages.StandardQueryParameters() |
| 367 if fields: |
| 368 global_params.fields = ','.join(set(fields)) |
| 369 try: |
| 370 return self.api_client.buckets.Insert(apitools_request, |
| 371 global_params=global_params) |
| 372 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 373 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) |
| 374 |
| 375 def DeleteBucket(self, bucket_name, preconditions=None, provider=None): |
| 376 """See CloudApi class for function doc strings.""" |
| 377 if not preconditions: |
| 378 preconditions = Preconditions() |
| 379 |
| 380 apitools_request = apitools_messages.StorageBucketsDeleteRequest( |
| 381 bucket=bucket_name, ifMetagenerationMatch=preconditions.meta_gen_match) |
| 382 |
| 383 try: |
| 384 self.api_client.buckets.Delete(apitools_request) |
| 385 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 386 if isinstance( |
| 387 self._TranslateApitoolsException(e, bucket_name=bucket_name), |
| 388 NotEmptyException): |
| 389 # If bucket is not empty, check to see if versioning is enabled and |
| 390 # signal that in the exception if it is. |
| 391 bucket_metadata = self.GetBucket(bucket_name, |
| 392 fields=['versioning']) |
| 393 if bucket_metadata.versioning and bucket_metadata.versioning.enabled: |
| 394 raise NotEmptyException('VersionedBucketNotEmpty', |
| 395 status=e.status_code) |
| 396 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) |
| 397 |
| 398 def ListBuckets(self, project_id=None, provider=None, fields=None): |
| 399 """See CloudApi class for function doc strings.""" |
| 400 projection = (apitools_messages.StorageBucketsListRequest |
| 401 .ProjectionValueValuesEnum.full) |
| 402 project_id = PopulateProjectId(project_id) |
| 403 |
| 404 apitools_request = apitools_messages.StorageBucketsListRequest( |
| 405 project=project_id, maxResults=NUM_BUCKETS_PER_LIST_PAGE, |
| 406 projection=projection) |
| 407 global_params = apitools_messages.StandardQueryParameters() |
| 408 if fields: |
| 409 fields = set(fields) |
| 410 fields.add('nextPageToken') |
| 411 global_params.fields = ','.join(fields) |
| 412 try: |
| 413 bucket_list = self.api_client.buckets.List(apitools_request, |
| 414 global_params=global_params) |
| 415 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 416 self._TranslateExceptionAndRaise(e) |
| 417 |
| 418 for bucket in self._YieldBuckets(bucket_list): |
| 419 yield bucket |
| 420 |
| 421 while bucket_list.nextPageToken: |
| 422 apitools_request = apitools_messages.StorageBucketsListRequest( |
| 423 project=project_id, pageToken=bucket_list.nextPageToken, |
| 424 maxResults=NUM_BUCKETS_PER_LIST_PAGE, projection=projection) |
| 425 try: |
| 426 bucket_list = self.api_client.buckets.List(apitools_request, |
| 427 global_params=global_params) |
| 428 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 429 self._TranslateExceptionAndRaise(e) |
| 430 |
| 431 for bucket in self._YieldBuckets(bucket_list): |
| 432 yield bucket |
| 433 |
| 434 def _YieldBuckets(self, bucket_list): |
| 435 """Yields buckets from a list returned by apitools.""" |
| 436 if bucket_list.items: |
| 437 for bucket in bucket_list.items: |
| 438 yield bucket |
| 439 |
| 440 def ListObjects(self, bucket_name, prefix=None, delimiter=None, |
| 441 all_versions=None, provider=None, fields=None): |
| 442 """See CloudApi class for function doc strings.""" |
| 443 projection = (apitools_messages.StorageObjectsListRequest |
| 444 .ProjectionValueValuesEnum.full) |
| 445 apitools_request = apitools_messages.StorageObjectsListRequest( |
| 446 bucket=bucket_name, prefix=prefix, delimiter=delimiter, |
| 447 versions=all_versions, projection=projection, |
| 448 maxResults=NUM_OBJECTS_PER_LIST_PAGE) |
| 449 global_params = apitools_messages.StandardQueryParameters() |
| 450 |
| 451 if fields: |
| 452 fields = set(fields) |
| 453 if 'nextPageToken' not in fields: |
| 454 fields.add('nextPageToken') |
| 455 global_params.fields = ','.join(fields) |
| 456 |
| 457 try: |
| 458 object_list = self.api_client.objects.List(apitools_request, |
| 459 global_params=global_params) |
| 460 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 461 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) |
| 462 |
| 463 for object_or_prefix in self._YieldObjectsAndPrefixes(object_list): |
| 464 yield object_or_prefix |
| 465 |
| 466 while object_list.nextPageToken: |
| 467 apitools_request = apitools_messages.StorageObjectsListRequest( |
| 468 bucket=bucket_name, prefix=prefix, delimiter=delimiter, |
| 469 versions=all_versions, projection=projection, |
| 470 pageToken=object_list.nextPageToken, |
| 471 maxResults=NUM_OBJECTS_PER_LIST_PAGE) |
| 472 try: |
| 473 object_list = self.api_client.objects.List(apitools_request, |
| 474 global_params=global_params) |
| 475 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 476 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) |
| 477 |
| 478 for object_or_prefix in self._YieldObjectsAndPrefixes(object_list): |
| 479 yield object_or_prefix |
| 480 |
| 481 def _YieldObjectsAndPrefixes(self, object_list): |
| 482 if object_list.items: |
| 483 for cloud_obj in object_list.items: |
| 484 yield CloudApi.CsObjectOrPrefix(cloud_obj, |
| 485 CloudApi.CsObjectOrPrefixType.OBJECT) |
| 486 if object_list.prefixes: |
| 487 for prefix in object_list.prefixes: |
| 488 yield CloudApi.CsObjectOrPrefix(prefix, |
| 489 CloudApi.CsObjectOrPrefixType.PREFIX) |
| 490 |
| 491 def GetObjectMetadata(self, bucket_name, object_name, generation=None, |
| 492 provider=None, fields=None): |
| 493 """See CloudApi class for function doc strings.""" |
| 494 projection = (apitools_messages.StorageObjectsGetRequest |
| 495 .ProjectionValueValuesEnum.full) |
| 496 |
| 497 if generation: |
| 498 generation = long(generation) |
| 499 |
| 500 apitools_request = apitools_messages.StorageObjectsGetRequest( |
| 501 bucket=bucket_name, object=object_name, projection=projection, |
| 502 generation=generation) |
| 503 global_params = apitools_messages.StandardQueryParameters() |
| 504 if fields: |
| 505 global_params.fields = ','.join(set(fields)) |
| 506 |
| 507 try: |
| 508 return self.api_client.objects.Get(apitools_request, |
| 509 global_params=global_params) |
| 510 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 511 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name, |
| 512 object_name=object_name, |
| 513 generation=generation) |
| 514 |
| 515 def GetObjectMedia( |
| 516 self, bucket_name, object_name, download_stream, |
| 517 provider=None, generation=None, object_size=None, |
| 518 download_strategy=CloudApi.DownloadStrategy.ONE_SHOT, start_byte=0, |
| 519 end_byte=None, progress_callback=None, serialization_data=None, |
| 520 digesters=None): |
| 521 """See CloudApi class for function doc strings.""" |
| 522 # This implementation gets the object metadata first if it is not passed |
| 523 # in via serialization_data. |
| 524 if generation: |
| 525 generation = long(generation) |
| 526 |
| 527 outer_total_size = object_size |
| 528 if serialization_data: |
| 529 outer_total_size = json.loads(serialization_data)['total_size'] |
| 530 |
| 531 if progress_callback: |
| 532 if outer_total_size is None: |
| 533 raise ArgumentException('Download size is required when callbacks are ' |
| 534 'requested for a download, but no size was ' |
| 535 'provided.') |
| 536 progress_callback(0, outer_total_size) |
| 537 |
| 538 bytes_downloaded_container = BytesTransferredContainer() |
| 539 bytes_downloaded_container.bytes_transferred = start_byte |
| 540 |
| 541 callback_class_factory = DownloadCallbackConnectionClassFactory( |
| 542 bytes_downloaded_container, total_size=outer_total_size, |
| 543 progress_callback=progress_callback, digesters=digesters) |
| 544 download_http_class = callback_class_factory.GetConnectionClass() |
| 545 |
| 546 download_http = self._GetNewDownloadHttp(download_stream) |
| 547 download_http.connections = {'https': download_http_class} |
| 548 authorized_download_http = self.credentials.authorize(download_http) |
| 549 WrapDownloadHttpRequest(authorized_download_http) |
| 550 |
| 551 if serialization_data: |
| 552 apitools_download = apitools_transfer.Download.FromData( |
| 553 download_stream, serialization_data, self.api_client.http) |
| 554 else: |
| 555 apitools_download = apitools_transfer.Download.FromStream( |
| 556 download_stream, auto_transfer=False, total_size=object_size) |
| 557 |
| 558 apitools_download.bytes_http = authorized_download_http |
| 559 apitools_request = apitools_messages.StorageObjectsGetRequest( |
| 560 bucket=bucket_name, object=object_name, generation=generation) |
| 561 |
| 562 try: |
| 563 if download_strategy == CloudApi.DownloadStrategy.RESUMABLE: |
| 564 # Disable retries in apitools. We will handle them explicitly here. |
| 565 apitools_download.retry_func = ( |
| 566 apitools_http_wrapper.RethrowExceptionHandler) |
| 567 return self._PerformResumableDownload( |
| 568 bucket_name, object_name, download_stream, apitools_request, |
| 569 apitools_download, bytes_downloaded_container, |
| 570 generation=generation, start_byte=start_byte, end_byte=end_byte, |
| 571 serialization_data=serialization_data) |
| 572 else: |
| 573 return self._PerformDownload( |
| 574 bucket_name, object_name, download_stream, apitools_request, |
| 575 apitools_download, generation=generation, start_byte=start_byte, |
| 576 end_byte=end_byte, serialization_data=serialization_data) |
| 577 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 578 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name, |
| 579 object_name=object_name, |
| 580 generation=generation) |
| 581 |
| 582 def _PerformResumableDownload( |
| 583 self, bucket_name, object_name, download_stream, apitools_request, |
| 584 apitools_download, bytes_downloaded_container, generation=None, |
| 585 start_byte=0, end_byte=None, serialization_data=None): |
| 586 retries = 0 |
| 587 last_progress_byte = start_byte |
| 588 while retries <= self.num_retries: |
| 589 try: |
| 590 return self._PerformDownload( |
| 591 bucket_name, object_name, download_stream, apitools_request, |
| 592 apitools_download, generation=generation, start_byte=start_byte, |
| 593 end_byte=end_byte, serialization_data=serialization_data) |
| 594 except HTTP_TRANSFER_EXCEPTIONS, e: |
| 595 start_byte = download_stream.tell() |
| 596 bytes_downloaded_container.bytes_transferred = start_byte |
| 597 if start_byte > last_progress_byte: |
| 598 # We've made progress, so allow a fresh set of retries. |
| 599 last_progress_byte = start_byte |
| 600 retries = 0 |
| 601 retries += 1 |
| 602 if retries > self.num_retries: |
| 603 raise ResumableDownloadException( |
| 604 'Transfer failed after %d retries. Final exception: %s' % |
| 605 (self.num_retries, str(e))) |
| 606 time.sleep(CalculateWaitForRetry(retries, max_wait=GetMaxRetryDelay())) |
| 607 self.logger.info( |
| 608 'Retrying download from byte %s after exception.', start_byte) |
| 609 apitools_http_wrapper.RebuildHttpConnections( |
| 610 apitools_download.bytes_http) |
| 611 |
| 612 def _PerformDownload( |
| 613 self, bucket_name, object_name, download_stream, apitools_request, |
| 614 apitools_download, generation=None, start_byte=0, end_byte=None, |
| 615 serialization_data=None): |
| 616 if not serialization_data: |
| 617 try: |
| 618 self.api_client.objects.Get(apitools_request, |
| 619 download=apitools_download) |
| 620 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 621 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name, |
| 622 object_name=object_name, |
| 623 generation=generation) |
| 624 |
| 625 # Disable apitools' default print callbacks. |
| 626 def _NoopCallback(unused_response, unused_download_object): |
| 627 pass |
| 628 |
| 629 # TODO: If we have a resumable download with accept-encoding:gzip |
| 630 # on an object that is compressible but not in gzip form in the cloud, |
| 631 # on-the-fly compression will gzip the object. In this case, if our |
| 632 # download breaks, future requests will ignore the range header and just |
| 633 # return the object (gzipped) in its entirety. Ideally, we would unzip |
| 634 # the bytes that we have locally and send a range request without |
| 635 # accept-encoding:gzip so that we can download only the (uncompressed) bytes |
| 636 # that we don't yet have. |
| 637 |
| 638 # Since bytes_http is created in this function, we don't get the |
| 639 # user-agent header from api_client's http automatically. |
| 640 additional_headers = { |
| 641 'accept-encoding': 'gzip', |
| 642 'user-agent': self.api_client.user_agent |
| 643 } |
| 644 if start_byte or end_byte: |
| 645 apitools_download.GetRange(additional_headers=additional_headers, |
| 646 start=start_byte, end=end_byte) |
| 647 else: |
| 648 apitools_download.StreamInChunks( |
| 649 callback=_NoopCallback, finish_callback=_NoopCallback, |
| 650 additional_headers=additional_headers) |
| 651 return apitools_download.encoding |
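| # For example, resuming with start_byte=1024 and end_byte=2047 takes the |
| # GetRange path above, requesting only those 1024 bytes (a Range header of |
| # the form bytes=1024-2047) rather than re-downloading the whole object. |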
| 652 |
| 653 def PatchObjectMetadata(self, bucket_name, object_name, metadata, |
| 654 generation=None, preconditions=None, provider=None, |
| 655 fields=None): |
| 656 """See CloudApi class for function doc strings.""" |
| 657 projection = (apitools_messages.StorageObjectsPatchRequest |
| 658 .ProjectionValueValuesEnum.full) |
| 659 |
| 660 if not preconditions: |
| 661 preconditions = Preconditions() |
| 662 |
| 663 if generation: |
| 664 generation = long(generation) |
| 665 |
| 666 apitools_request = apitools_messages.StorageObjectsPatchRequest( |
| 667 bucket=bucket_name, object=object_name, objectResource=metadata, |
| 668 generation=generation, projection=projection, |
| 669 ifGenerationMatch=preconditions.gen_match, |
| 670 ifMetagenerationMatch=preconditions.meta_gen_match) |
| 671 global_params = apitools_messages.StandardQueryParameters() |
| 672 if fields: |
| 673 global_params.fields = ','.join(set(fields)) |
| 674 |
| 675 try: |
| 676 return self.api_client.objects.Patch(apitools_request, |
| 677 global_params=global_params) |
| 678 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 679 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name, |
| 680 object_name=object_name, |
| 681 generation=generation) |
| 682 |
| 683 def _UploadObject(self, upload_stream, object_metadata, canned_acl=None, |
| 684 size=None, preconditions=None, provider=None, fields=None, |
| 685 serialization_data=None, tracker_callback=None, |
| 686 progress_callback=None, apitools_strategy='simple'): |
| 687 """Upload implementation, apitools_strategy plus gsutil Cloud API args.""" |
| 688 ValidateDstObjectMetadata(object_metadata) |
| 689 assert not canned_acl, 'Canned ACLs not supported by JSON API.' |
| 690 |
| 691 bytes_uploaded_container = BytesTransferredContainer() |
| 692 |
| 693 total_size = 0 |
| 694 if progress_callback and size: |
| 695 total_size = size |
| 696 progress_callback(0, size) |
| 697 |
| 698 callback_class_factory = UploadCallbackConnectionClassFactory( |
| 699 bytes_uploaded_container, total_size=total_size, |
| 700 progress_callback=progress_callback) |
| 701 |
| 702 upload_http = GetNewHttp() |
| 703 upload_http_class = callback_class_factory.GetConnectionClass() |
| 704 upload_http.connections = {'http': upload_http_class, |
| 705 'https': upload_http_class} |
| 706 |
| 707 # Disable apitools' default print callbacks. |
| 708 def _NoopCallback(unused_response, unused_upload_object): |
| 709 pass |
| 710 |
| 711 authorized_upload_http = self.credentials.authorize(upload_http) |
| 712 WrapUploadHttpRequest(authorized_upload_http) |
| 713 # Since bytes_http is created in this function, we don't get the |
| 714 # user-agent header from api_client's http automatically. |
| 715 additional_headers = { |
| 716 'user-agent': self.api_client.user_agent |
| 717 } |
| 718 |
| 719 try: |
| 720 content_type = None |
| 721 apitools_request = None |
| 722 global_params = None |
| 723 if not serialization_data: |
| 724 # This is a new upload, set up initial upload state. |
| 725 content_type = object_metadata.contentType |
| 726 if not content_type: |
| 727 content_type = DEFAULT_CONTENT_TYPE |
| 728 |
| 729 if not preconditions: |
| 730 preconditions = Preconditions() |
| 731 |
| 732 apitools_request = apitools_messages.StorageObjectsInsertRequest( |
| 733 bucket=object_metadata.bucket, object=object_metadata, |
| 734 ifGenerationMatch=preconditions.gen_match, |
| 735 ifMetagenerationMatch=preconditions.meta_gen_match) |
| 736 |
| 737 global_params = apitools_messages.StandardQueryParameters() |
| 738 if fields: |
| 739 global_params.fields = ','.join(set(fields)) |
| 740 |
| 741 if apitools_strategy == 'simple': # One-shot upload. |
| 742 apitools_upload = apitools_transfer.Upload( |
| 743 upload_stream, content_type, total_size=size, auto_transfer=True) |
| 744 apitools_upload.strategy = apitools_strategy |
| 745 apitools_upload.bytes_http = authorized_upload_http |
| 746 |
| 747 return self.api_client.objects.Insert( |
| 748 apitools_request, |
| 749 upload=apitools_upload, |
| 750 global_params=global_params) |
| 751 else: # Resumable upload. |
| 752 return self._PerformResumableUpload( |
| 753 upload_stream, authorized_upload_http, content_type, size, |
| 754 serialization_data, apitools_strategy, apitools_request, |
| 755 global_params, bytes_uploaded_container, tracker_callback, |
| 756 _NoopCallback, additional_headers) |
| 757 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 758 self._TranslateExceptionAndRaise(e, bucket_name=object_metadata.bucket, |
| 759 object_name=object_metadata.name) |
| 760 |
| 761 def _PerformResumableUpload( |
| 762 self, upload_stream, authorized_upload_http, content_type, size, |
| 763 serialization_data, apitools_strategy, apitools_request, global_params, |
| 764 bytes_uploaded_container, tracker_callback, noop_callback, addl_headers): |
| 765 try: |
| 766 if serialization_data: |
| 767 # Resuming an existing upload. |
| 768 apitools_upload = apitools_transfer.Upload.FromData( |
| 769 upload_stream, serialization_data, self.api_client.http) |
| 770 apitools_upload.chunksize = _ResumableChunkSize() |
| 771 apitools_upload.bytes_http = authorized_upload_http |
| 772 else: |
| 773 # New resumable upload. |
| 774 apitools_upload = apitools_transfer.Upload( |
| 775 upload_stream, content_type, total_size=size, |
| 776 chunksize=_ResumableChunkSize(), auto_transfer=False) |
| 777 apitools_upload.strategy = apitools_strategy |
| 778 apitools_upload.bytes_http = authorized_upload_http |
| 779 self.api_client.objects.Insert( |
| 780 apitools_request, |
| 781 upload=apitools_upload, |
| 782 global_params=global_params) |
| 783 # Disable retries in apitools. We will handle them explicitly here. |
| 784 apitools_upload.retry_func = ( |
| 785 apitools_http_wrapper.RethrowExceptionHandler) |
| 786 |
| 787 # If we're resuming an upload, apitools has at this point received |
| 788 # from the server how many bytes it already has. Update our |
| 789 # callback class with this information. |
| 790 bytes_uploaded_container.bytes_transferred = apitools_upload.progress |
| 791 if tracker_callback: |
| 792 tracker_callback(json.dumps(apitools_upload.serialization_data)) |
| 793 |
| 794 retries = 0 |
| 795 last_progress_byte = apitools_upload.progress |
| 796 while retries <= self.num_retries: |
| 797 try: |
| 798 # TODO: On retry, this will seek to the bytes that the server has, |
| 799 # causing the hash to be recalculated. Make HashingFileUploadWrapper |
| 800 # save a digest according to json_resumable_chunk_size. |
| 801 http_response = apitools_upload.StreamInChunks( |
| 802 callback=noop_callback, finish_callback=noop_callback, |
| 803 additional_headers=addl_headers) |
| 804 return self.api_client.objects.ProcessHttpResponse( |
| 805 self.api_client.objects.GetMethodConfig('Insert'), http_response) |
| 806 except HTTP_TRANSFER_EXCEPTIONS, e: |
| 807 apitools_http_wrapper.RebuildHttpConnections( |
| 808 apitools_upload.bytes_http) |
| 809 while retries <= self.num_retries: |
| 810 try: |
| 811 # TODO: Simulate the refresh case in tests. Right now, our |
| 812 # mocks are not complex enough to simulate a failure. |
| 813 apitools_upload.RefreshResumableUploadState() |
| 814 start_byte = apitools_upload.progress |
| 815 bytes_uploaded_container.bytes_transferred = start_byte |
| 816 break |
| 817 except HTTP_TRANSFER_EXCEPTIONS, e2: |
| 818 apitools_http_wrapper.RebuildHttpConnections( |
| 819 apitools_upload.bytes_http) |
| 820 retries += 1 |
| 821 if retries > self.num_retries: |
| 822 raise ResumableUploadException( |
| 823 'Transfer failed after %d retries. Final exception: %s' % |
| 824 (self.num_retries, e2)) |
| 825 time.sleep( |
| 826 CalculateWaitForRetry(retries, max_wait=GetMaxRetryDelay())) |
| 827 if start_byte > last_progress_byte: |
| 828 # We've made progress, so allow a fresh set of retries. |
| 829 last_progress_byte = start_byte |
| 830 retries = 0 |
| 831 else: |
| 832 retries += 1 |
| 833 if retries > self.num_retries: |
| 834 raise ResumableUploadException( |
| 835 'Transfer failed after %d retries. Final exception: %s' % |
| 836 (self.num_retries, e)) |
| 837 time.sleep( |
| 838 CalculateWaitForRetry(retries, max_wait=GetMaxRetryDelay())) |
| 839 self.logger.info( |
| 840 'Retrying upload from byte %s after exception.', start_byte) |
| 841 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 842 resumable_ex = self._TranslateApitoolsResumableUploadException(e) |
| 843 if resumable_ex: |
| 844 raise resumable_ex |
| 845 else: |
| 846 raise |
| 847 |
| 848 def UploadObject(self, upload_stream, object_metadata, canned_acl=None, |
| 849 size=None, preconditions=None, progress_callback=None, |
| 850 provider=None, fields=None): |
| 851 """See CloudApi class for function doc strings.""" |
| 852 return self._UploadObject( |
| 853 upload_stream, object_metadata, canned_acl=canned_acl, |
| 854 size=size, preconditions=preconditions, |
| 855 progress_callback=progress_callback, fields=fields, |
| 856 apitools_strategy='simple') |
| 857 |
| 858 def UploadObjectStreaming(self, upload_stream, object_metadata, |
| 859 canned_acl=None, preconditions=None, |
| 860 progress_callback=None, provider=None, |
| 861 fields=None): |
| 862 """See CloudApi class for function doc strings.""" |
| 863 # Streaming indicated by not passing a size. |
| 864 return self._UploadObject( |
| 865 upload_stream, object_metadata, canned_acl=canned_acl, |
| 866 preconditions=preconditions, progress_callback=progress_callback, |
| 867 fields=fields, apitools_strategy='simple') |
| 868 |
| 869 def UploadObjectResumable( |
| 870 self, upload_stream, object_metadata, canned_acl=None, preconditions=None, |
| 871 provider=None, fields=None, size=None, serialization_data=None, |
| 872 tracker_callback=None, progress_callback=None): |
| 873 """See CloudApi class for function doc strings.""" |
| 874 return self._UploadObject( |
| 875 upload_stream, object_metadata, canned_acl=canned_acl, |
| 876 preconditions=preconditions, fields=fields, size=size, |
| 877 serialization_data=serialization_data, |
| 878 tracker_callback=tracker_callback, progress_callback=progress_callback, |
| 879 apitools_strategy='resumable') |
| 880 |
| 881 def CopyObject(self, src_bucket_name, src_obj_name, dst_obj_metadata, |
| 882 src_generation=None, canned_acl=None, preconditions=None, |
| 883 provider=None, fields=None): |
| 884 """See CloudApi class for function doc strings.""" |
| 885 ValidateDstObjectMetadata(dst_obj_metadata) |
| 886 assert not canned_acl, 'Canned ACLs not supported by JSON API.' |
| 887 |
| 888 if src_generation: |
| 889 src_generation = long(src_generation) |
| 890 |
| 891 if not preconditions: |
| 892 preconditions = Preconditions() |
| 893 |
| 894 projection = (apitools_messages.StorageObjectsCopyRequest |
| 895 .ProjectionValueValuesEnum.full) |
| 896 global_params = apitools_messages.StandardQueryParameters() |
| 897 if fields: |
| 898 global_params.fields = ','.join(set(fields)) |
| 899 |
| 900 apitools_request = apitools_messages.StorageObjectsCopyRequest( |
| 901 sourceBucket=src_bucket_name, sourceObject=src_obj_name, |
| 902 destinationBucket=dst_obj_metadata.bucket, |
| 903 destinationObject=dst_obj_metadata.name, |
| 904 projection=projection, object=dst_obj_metadata, |
| 905 sourceGeneration=src_generation, |
| 906 ifGenerationMatch=preconditions.gen_match, |
| 907 ifMetagenerationMatch=preconditions.meta_gen_match) |
| 908 try: |
| 909 return self.api_client.objects.Copy(apitools_request, |
| 910 global_params=global_params) |
| 911 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 912 self._TranslateExceptionAndRaise(e, bucket_name=dst_obj_metadata.bucket, |
| 913 object_name=dst_obj_metadata.name) |
| 914 |
| 915 def DeleteObject(self, bucket_name, object_name, preconditions=None, |
| 916 generation=None, provider=None): |
| 917 """See CloudApi class for function doc strings.""" |
| 918 if not preconditions: |
| 919 preconditions = Preconditions() |
| 920 |
| 921 if generation: |
| 922 generation = long(generation) |
| 923 |
| 924 apitools_request = apitools_messages.StorageObjectsDeleteRequest( |
| 925 bucket=bucket_name, object=object_name, generation=generation, |
| 926 ifGenerationMatch=preconditions.gen_match, |
| 927 ifMetagenerationMatch=preconditions.meta_gen_match) |
| 928 try: |
| 929 return self.api_client.objects.Delete(apitools_request) |
| 930 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 931 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name, |
| 932 object_name=object_name, |
| 933 generation=generation) |
| 934 |
| 935 def ComposeObject(self, src_objs_metadata, dst_obj_metadata, |
| 936 preconditions=None, provider=None, fields=None): |
| 937 """See CloudApi class for function doc strings.""" |
| 938 ValidateDstObjectMetadata(dst_obj_metadata) |
| 939 |
| 940 dst_obj_name = dst_obj_metadata.name |
| 941 dst_obj_metadata.name = None |
| 942 dst_bucket_name = dst_obj_metadata.bucket |
| 943 dst_obj_metadata.bucket = None |
| 944 if not dst_obj_metadata.contentType: |
| 945 dst_obj_metadata.contentType = DEFAULT_CONTENT_TYPE |
| 946 |
| 947 if not preconditions: |
| 948 preconditions = Preconditions() |
| 949 |
| 950 global_params = apitools_messages.StandardQueryParameters() |
| 951 if fields: |
| 952 global_params.fields = ','.join(set(fields)) |
| 953 |
| 954 src_objs_compose_request = apitools_messages.ComposeRequest( |
| 955 sourceObjects=src_objs_metadata, destination=dst_obj_metadata) |
| 956 |
| 957 apitools_request = apitools_messages.StorageObjectsComposeRequest( |
| 958 composeRequest=src_objs_compose_request, |
| 959 destinationBucket=dst_bucket_name, |
| 960 destinationObject=dst_obj_name, |
| 961 ifGenerationMatch=preconditions.gen_match, |
| 962 ifMetagenerationMatch=preconditions.meta_gen_match) |
| 963 try: |
| 964 return self.api_client.objects.Compose(apitools_request, |
| 965 global_params=global_params) |
| 966 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 967 self._TranslateExceptionAndRaise(e, bucket_name=dst_bucket_name, |
| 968 object_name=dst_obj_name) |
| 969 |
| 970 def WatchBucket(self, bucket_name, address, channel_id, token=None, |
| 971 provider=None, fields=None): |
| 972 """See CloudApi class for function doc strings.""" |
| 973 projection = (apitools_messages.StorageObjectsWatchAllRequest |
| 974 .ProjectionValueValuesEnum.full) |
| 975 |
| 976 channel = apitools_messages.Channel(address=address, id=channel_id, |
| 977 token=token, type='WEB_HOOK') |
| 978 |
| 979 apitools_request = apitools_messages.StorageObjectsWatchAllRequest( |
| 980 bucket=bucket_name, channel=channel, projection=projection) |
| 981 |
| 982 global_params = apitools_messages.StandardQueryParameters() |
| 983 if fields: |
| 984 global_params.fields = ','.join(set(fields)) |
| 985 |
| 986 try: |
| 987 return self.api_client.objects.WatchAll(apitools_request, |
| 988 global_params=global_params) |
| 989 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 990 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) |
| 991 |
| 992 def StopChannel(self, channel_id, resource_id, provider=None): |
| 993 """See CloudApi class for function doc strings.""" |
| 994 channel = apitools_messages.Channel(id=channel_id, resourceId=resource_id) |
| 995 try: |
| 996 self.api_client.channels.Stop(channel) |
| 997 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 998 self._TranslateExceptionAndRaise(e) |
| 999 |
| 1000 def _TranslateExceptionAndRaise(self, e, bucket_name=None, object_name=None, |
| 1001 generation=None): |
| 1002 """Translates an HTTP exception and raises the translated or original value. |
| 1003 |
| 1004 Args: |
| 1005 e: Any Exception. |
| 1006 bucket_name: Optional bucket name in request that caused the exception. |
| 1007 object_name: Optional object name in request that caused the exception. |
| 1008 generation: Optional generation in request that caused the exception. |
| 1009 |
| 1010 Raises: |
| 1011 Translated CloudApi exception, or the original exception if it was not |
| 1012 translatable. |
| 1013 """ |
| 1014 translated_exception = self._TranslateApitoolsException( |
| 1015 e, bucket_name=bucket_name, object_name=object_name, |
| 1016 generation=generation) |
| 1017 if translated_exception: |
| 1018 raise translated_exception |
| 1019 else: |
| 1020 raise |
| 1021 |
| 1022 def _GetMessageFromHttpError(self, http_error): |
| 1023 if isinstance(http_error, apitools_exceptions.HttpError): |
| 1024 if getattr(http_error, 'content', None): |
| 1025 try: |
| 1026 json_obj = json.loads(http_error.content) |
| 1027 if 'error' in json_obj and 'message' in json_obj['error']: |
| 1028 return json_obj['error']['message'] |
| 1029 except Exception: # pylint: disable=broad-except |
| 1030 # If we couldn't decode anything, just leave the message as None. |
| 1031 pass |
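| # A typical error body parsed above looks like (illustrative): |
| # {"error": {"code": 403, "message": "Forbidden"}} |
| # for which this method returns 'Forbidden'. |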
| 1032 |
| 1033 def _TranslateApitoolsResumableUploadException( |
| 1034 self, e, bucket_name=None, object_name=None, generation=None): |
| 1035 if isinstance(e, apitools_exceptions.HttpError): |
| 1036 message = self._GetMessageFromHttpError(e) |
| 1037 if (e.status_code == 503 and |
| 1038 self.http.disable_ssl_certificate_validation): |
| 1039 return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE, |
| 1040 status=e.status_code) |
| 1041 elif e.status_code >= 500: |
| 1042 return ResumableUploadException( |
| 1043 message or 'Server Error', status=e.status_code) |
| 1044 elif e.status_code >= 400: |
| 1045 return ResumableUploadAbortException( |
| 1046 message or 'Bad Request', status=e.status_code) |
| 1047 if (isinstance(e, apitools_exceptions.TransferError) and |
| 1048 ('Aborting transfer' in e.message or |
| 1049 'Not enough bytes in stream' in e.message or |
| 1050 'additional bytes left in stream' in e.message)): |
| 1051 return ResumableUploadAbortException(e.message) |
| 1052 |
| 1053 def _TranslateApitoolsException(self, e, bucket_name=None, object_name=None, |
| 1054 generation=None): |
| 1055 """Translates apitools exceptions into their gsutil Cloud Api equivalents. |
| 1056 |
| 1057 Args: |
| 1058 e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS. |
| 1059 bucket_name: Optional bucket name in request that caused the exception. |
| 1060 object_name: Optional object name in request that caused the exception. |
| 1061 generation: Optional generation in request that caused the exception. |
| 1062 |
| 1063 Returns: |
| 1064 CloudStorageApiServiceException for translatable exceptions, None |
| 1065 otherwise. |
| 1066 """ |
| 1067 if isinstance(e, apitools_exceptions.HttpError): |
| 1068 message = self._GetMessageFromHttpError(e) |
| 1069 if e.status_code == 400: |
| 1070 # It is possible that the Project ID is incorrect. Unfortunately the |
| 1071 # JSON API does not give us much information about what part of the |
| 1072 # request was bad. |
| 1073 return BadRequestException(message or 'Bad Request', |
| 1074 status=e.status_code) |
| 1075 elif e.status_code == 401: |
| 1076 if 'Login Required' in str(e): |
| 1077 return AccessDeniedException( |
| 1078 message or 'Access denied: login required.', |
| 1079 status=e.status_code) |
| 1080 elif e.status_code == 403: |
| 1081 if 'The account for the specified project has been disabled' in str(e): |
| 1082 return AccessDeniedException(message or 'Account disabled.', |
| 1083 status=e.status_code) |
| 1084 elif 'Daily Limit for Unauthenticated Use Exceeded' in str(e): |
| 1085 return AccessDeniedException( |
| 1086 message or 'Access denied: quota exceeded. ' |
| 1087 'Is your project ID valid?', |
| 1088 status=e.status_code) |
| 1089 elif 'The bucket you tried to delete was not empty.' in str(e): |
| 1090 return NotEmptyException('BucketNotEmpty (%s)' % bucket_name, |
| 1091 status=e.status_code) |
| 1092 elif ('The bucket you tried to create requires domain ownership ' |
| 1093 'verification.' in str(e)): |
| 1094 return AccessDeniedException( |
| 1095 'The bucket you tried to create requires domain ownership ' |
| 1096 'verification. Please see ' |
| 1097 'https://developers.google.com/storage/docs/bucketnaming' |
| 1098 '?hl=en#verification for more details.', status=e.status_code) |
| 1099 elif 'User Rate Limit Exceeded' in str(e): |
| 1100 return AccessDeniedException('Rate limit exceeded. Please retry this ' |
| 1101 'request later.', status=e.status_code) |
| 1102 elif 'Access Not Configured' in str(e): |
| 1103 return AccessDeniedException( |
| 1104 'Access Not Configured. Please go to the Google Developers ' |
| 1105 'Console (https://cloud.google.com/console#/project) for your ' |
| 1106 'project, select APIs and Auth and enable the ' |
| 1107 'Google Cloud Storage JSON API.', |
| 1108 status=e.status_code) |
| 1109 else: |
| 1110 return AccessDeniedException(message or e.message, |
| 1111 status=e.status_code) |
| 1112 elif e.status_code == 404: |
| 1113 if bucket_name: |
| 1114 if object_name: |
| 1115 return CreateObjectNotFoundException(e.status_code, self.provider, |
| 1116 bucket_name, object_name, |
| 1117 generation=generation) |
| 1118 return CreateBucketNotFoundException(e.status_code, self.provider, |
| 1119 bucket_name) |
| 1120 return NotFoundException(e.message, status=e.status_code) |
| 1121 elif e.status_code == 409 and bucket_name: |
| 1122 if 'The bucket you tried to delete was not empty.' in str(e): |
| 1123 return NotEmptyException('BucketNotEmpty (%s)' % bucket_name, |
| 1124 status=e.status_code) |
| 1125 return ServiceException( |
| 1126 'Bucket %s already exists.' % bucket_name, status=e.status_code) |
| 1127 elif e.status_code == 412: |
| 1128 return PreconditionException(message, status=e.status_code) |
| 1129 elif (e.status_code == 503 and |
| 1130 self.http.disable_ssl_certificate_validation): |
| 1131 return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE, |
| 1132 status=e.status_code) |
| 1133 return ServiceException(message, status=e.status_code) |
| 1134 elif isinstance(e, apitools_exceptions.TransferInvalidError): |
| 1135 return ServiceException('Transfer invalid (possible encoding error: %s)' |
| 1136 % str(e)) |