OLD | NEW |
1 # -*- coding: utf-8 -*- | 1 # -*- coding: utf-8 -*- |
2 # Copyright 2014 Google Inc. All Rights Reserved. | 2 # Copyright 2014 Google Inc. All Rights Reserved. |
3 # | 3 # |
4 # Licensed under the Apache License, Version 2.0 (the "License"); | 4 # Licensed under the Apache License, Version 2.0 (the "License"); |
5 # you may not use this file except in compliance with the License. | 5 # you may not use this file except in compliance with the License. |
6 # You may obtain a copy of the License at | 6 # You may obtain a copy of the License at |
7 # | 7 # |
8 # http://www.apache.org/licenses/LICENSE-2.0 | 8 # http://www.apache.org/licenses/LICENSE-2.0 |
9 # | 9 # |
10 # Unless required by applicable law or agreed to in writing, software | 10 # Unless required by applicable law or agreed to in writing, software |
(...skipping 18 matching lines...) |
29 from apitools.base.py import encoding | 29 from apitools.base.py import encoding |
30 from apitools.base.py import exceptions as apitools_exceptions | 30 from apitools.base.py import exceptions as apitools_exceptions |
31 from apitools.base.py import http_wrapper as apitools_http_wrapper | 31 from apitools.base.py import http_wrapper as apitools_http_wrapper |
32 from apitools.base.py import transfer as apitools_transfer | 32 from apitools.base.py import transfer as apitools_transfer |
33 from apitools.base.py.util import CalculateWaitForRetry | 33 from apitools.base.py.util import CalculateWaitForRetry |
34 | 34 |
35 import boto | 35 import boto |
36 from boto import config | 36 from boto import config |
37 from gcs_oauth2_boto_plugin import oauth2_helper | 37 from gcs_oauth2_boto_plugin import oauth2_helper |
38 import httplib2 | 38 import httplib2 |
| 39 import oauth2client |
39 from oauth2client import devshell | 40 from oauth2client import devshell |
40 from oauth2client import multistore_file | 41 from oauth2client import multistore_file |
41 | 42 |
42 from gslib.cloud_api import AccessDeniedException | 43 from gslib.cloud_api import AccessDeniedException |
43 from gslib.cloud_api import ArgumentException | 44 from gslib.cloud_api import ArgumentException |
44 from gslib.cloud_api import BadRequestException | 45 from gslib.cloud_api import BadRequestException |
45 from gslib.cloud_api import CloudApi | 46 from gslib.cloud_api import CloudApi |
46 from gslib.cloud_api import NotEmptyException | 47 from gslib.cloud_api import NotEmptyException |
47 from gslib.cloud_api import NotFoundException | 48 from gslib.cloud_api import NotFoundException |
48 from gslib.cloud_api import PreconditionException | 49 from gslib.cloud_api import PreconditionException |
(...skipping 17 matching lines...) |
66 from gslib.progress_callback import ProgressCallbackWithBackoff | 67 from gslib.progress_callback import ProgressCallbackWithBackoff |
67 from gslib.project_id import PopulateProjectId | 68 from gslib.project_id import PopulateProjectId |
68 from gslib.third_party.storage_apitools import storage_v1_client as apitools_client | 69 from gslib.third_party.storage_apitools import storage_v1_client as apitools_client |
69 from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages | 70 from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages |
70 from gslib.tracker_file import DeleteTrackerFile | 71 from gslib.tracker_file import DeleteTrackerFile |
71 from gslib.tracker_file import GetRewriteTrackerFilePath | 72 from gslib.tracker_file import GetRewriteTrackerFilePath |
72 from gslib.tracker_file import HashRewriteParameters | 73 from gslib.tracker_file import HashRewriteParameters |
73 from gslib.tracker_file import ReadRewriteTrackerFile | 74 from gslib.tracker_file import ReadRewriteTrackerFile |
74 from gslib.tracker_file import WriteRewriteTrackerFile | 75 from gslib.tracker_file import WriteRewriteTrackerFile |
75 from gslib.translation_helper import CreateBucketNotFoundException | 76 from gslib.translation_helper import CreateBucketNotFoundException |
| 77 from gslib.translation_helper import CreateNotFoundExceptionForObjectWrite |
76 from gslib.translation_helper import CreateObjectNotFoundException | 78 from gslib.translation_helper import CreateObjectNotFoundException |
77 from gslib.translation_helper import DEFAULT_CONTENT_TYPE | 79 from gslib.translation_helper import DEFAULT_CONTENT_TYPE |
| 80 from gslib.translation_helper import PRIVATE_DEFAULT_OBJ_ACL |
78 from gslib.translation_helper import REMOVE_CORS_CONFIG | 81 from gslib.translation_helper import REMOVE_CORS_CONFIG |
79 from gslib.util import GetBotoConfigFileList | 82 from gslib.util import GetBotoConfigFileList |
80 from gslib.util import GetCertsFile | 83 from gslib.util import GetCertsFile |
81 from gslib.util import GetCredentialStoreFilename | 84 from gslib.util import GetCredentialStoreFilename |
82 from gslib.util import GetGceCredentialCacheFilename | 85 from gslib.util import GetGceCredentialCacheFilename |
83 from gslib.util import GetJsonResumableChunkSize | 86 from gslib.util import GetJsonResumableChunkSize |
84 from gslib.util import GetMaxRetryDelay | 87 from gslib.util import GetMaxRetryDelay |
85 from gslib.util import GetNewHttp | 88 from gslib.util import GetNewHttp |
86 from gslib.util import GetNumRetries | 89 from gslib.util import GetNumRetries |
87 from gslib.util import UTF8 | 90 from gslib.util import UTF8 |
(...skipping 37 matching lines...) |
125 """Service Unavailable. If you have recently changed | 128 """Service Unavailable. If you have recently changed |
126 https_validate_certificates from True to False in your boto configuration | 129 https_validate_certificates from True to False in your boto configuration |
127 file, please delete any cached access tokens in your filesystem (at %s) | 130 file, please delete any cached access tokens in your filesystem (at %s) |
128 and try again.""" % GetCredentialStoreFilename()) | 131 and try again.""" % GetCredentialStoreFilename()) |
129 | 132 |
130 | 133 |
131 class GcsJsonApi(CloudApi): | 134 class GcsJsonApi(CloudApi): |
132 """Google Cloud Storage JSON implementation of gsutil Cloud API.""" | 135 """Google Cloud Storage JSON implementation of gsutil Cloud API.""" |
133 | 136 |
134 def __init__(self, bucket_storage_uri_class, logger, provider=None, | 137 def __init__(self, bucket_storage_uri_class, logger, provider=None, |
135 credentials=None, debug=0): | 138 credentials=None, debug=0, trace_token=None): |
136 """Performs necessary setup for interacting with Google Cloud Storage. | 139 """Performs necessary setup for interacting with Google Cloud Storage. |
137 | 140 |
138 Args: | 141 Args: |
139 bucket_storage_uri_class: Unused. | 142 bucket_storage_uri_class: Unused. |
140 logger: logging.logger for outputting log messages. | 143 logger: logging.logger for outputting log messages. |
141 provider: Unused. This implementation supports only Google Cloud Storage. | 144 provider: Unused. This implementation supports only Google Cloud Storage. |
142 credentials: Credentials to be used for interacting with Google Cloud | 145 credentials: Credentials to be used for interacting with Google Cloud |
143 Storage. | 146 Storage. |
144 debug: Debug level for the API implementation (0..3). | 147 debug: Debug level for the API implementation (0..3). |
| 148 trace_token: Trace token to pass to the API implementation. |
145 """ | 149 """ |
146 # TODO: Plumb host_header for perfdiag / test_perfdiag. | 150 # TODO: Plumb host_header for perfdiag / test_perfdiag. |
147 # TODO: Add jitter to apitools' http_wrapper retry mechanism. | 151 # TODO: Add jitter to apitools' http_wrapper retry mechanism. |
148 super(GcsJsonApi, self).__init__(bucket_storage_uri_class, logger, | 152 super(GcsJsonApi, self).__init__(bucket_storage_uri_class, logger, |
149 provider='gs', debug=debug) | 153 provider='gs', debug=debug) |
150 no_op_credentials = False | 154 no_op_credentials = False |
151 if not credentials: | 155 if not credentials: |
152 loaded_credentials = self._CheckAndGetCredentials(logger) | 156 loaded_credentials = self._CheckAndGetCredentials(logger) |
153 | 157 |
154 if not loaded_credentials: | 158 if not loaded_credentials: |
155 loaded_credentials = NoOpCredentials() | 159 loaded_credentials = NoOpCredentials() |
156 no_op_credentials = True | 160 no_op_credentials = True |
157 else: | 161 else: |
158 if isinstance(credentials, NoOpCredentials): | 162 if isinstance(credentials, NoOpCredentials): |
159 no_op_credentials = True | 163 no_op_credentials = True |
160 | 164 |
161 self.credentials = credentials or loaded_credentials | 165 self.credentials = credentials or loaded_credentials |
162 | 166 |
163 self.certs_file = GetCertsFile() | 167 self.certs_file = GetCertsFile() |
164 | 168 |
165 self.http = GetNewHttp() | 169 self.http = GetNewHttp() |
| 170 |
| 171 # Re-use download and upload connections. This class is only called |
| 172 # sequentially, but we can share TCP warmed-up connections across calls. |
| 173 self.download_http = self._GetNewDownloadHttp() |
| 174 self.upload_http = self._GetNewUploadHttp() |
| 175 if self.credentials: |
| 176 self.authorized_download_http = self.credentials.authorize( |
| 177 self.download_http) |
| 178 self.authorized_upload_http = self.credentials.authorize(self.upload_http) |
| 179 else: |
| 180 self.authorized_download_http = self.download_http |
| 181 self.authorized_upload_http = self.upload_http |
| 182 |
| 183 WrapDownloadHttpRequest(self.authorized_download_http) |
| 184 WrapUploadHttpRequest(self.authorized_upload_http) |
| 185 |
166 self.http_base = 'https://' | 186 self.http_base = 'https://' |
167 gs_json_host = config.get('Credentials', 'gs_json_host', None) | 187 gs_json_host = config.get('Credentials', 'gs_json_host', None) |
168 self.host_base = gs_json_host or 'www.googleapis.com' | 188 self.host_base = gs_json_host or 'www.googleapis.com' |
169 | 189 |
170 if not gs_json_host: | 190 if not gs_json_host: |
171 gs_host = config.get('Credentials', 'gs_host', None) | 191 gs_host = config.get('Credentials', 'gs_host', None) |
172 if gs_host: | 192 if gs_host: |
173 raise ArgumentException( | 193 raise ArgumentException( |
174 'JSON API is selected but gs_json_host is not configured, ' | 194 'JSON API is selected but gs_json_host is not configured, ' |
175 'while gs_host is configured to %s. Please also configure ' | 195 'while gs_host is configured to %s. Please also configure ' |
(...skipping 12 matching lines...) |
188 % gs_port) | 208 % gs_port) |
189 self.host_port = '' | 209 self.host_port = '' |
190 else: | 210 else: |
191 self.host_port = ':' + config.get('Credentials', 'gs_json_port') | 211 self.host_port = ':' + config.get('Credentials', 'gs_json_port') |
192 | 212 |
193 self.api_version = config.get('GSUtil', 'json_api_version', | 213 self.api_version = config.get('GSUtil', 'json_api_version', |
194 DEFAULT_GCS_JSON_VERSION) | 214 DEFAULT_GCS_JSON_VERSION) |
195 self.url_base = (self.http_base + self.host_base + self.host_port + '/' + | 215 self.url_base = (self.http_base + self.host_base + self.host_port + '/' + |
196 'storage/' + self.api_version + '/') | 216 'storage/' + self.api_version + '/') |
197 | 217 |
| 218 credential_store_key_dict = self._GetCredentialStoreKeyDict( |
| 219 self.credentials) |
| 220 |
198 self.credentials.set_store( | 221 self.credentials.set_store( |
199 multistore_file.get_credential_storage_custom_string_key( | 222 multistore_file.get_credential_storage_custom_key( |
200 GetCredentialStoreFilename(), self.api_version)) | 223 GetCredentialStoreFilename(), credential_store_key_dict)) |
201 | 224 |
202 self.num_retries = GetNumRetries() | 225 self.num_retries = GetNumRetries() |
| 226 self.max_retry_wait = GetMaxRetryDelay() |
203 | 227 |
204 log_request = (debug >= 3) | 228 log_request = (debug >= 3) |
205 log_response = (debug >= 3) | 229 log_response = (debug >= 3) |
206 | 230 |
| 231 self.global_params = apitools_messages.StandardQueryParameters( |
| 232 trace='token:%s' % trace_token) if trace_token else None |
| 233 |
207 self.api_client = apitools_client.StorageV1( | 234 self.api_client = apitools_client.StorageV1( |
208 url=self.url_base, http=self.http, log_request=log_request, | 235 url=self.url_base, http=self.http, log_request=log_request, |
209 log_response=log_response, credentials=self.credentials, | 236 log_response=log_response, credentials=self.credentials, |
210 version=self.api_version) | 237 version=self.api_version, default_global_params=self.global_params) |
| 238 self.api_client.max_retry_wait = self.max_retry_wait |
211 self.api_client.num_retries = self.num_retries | 239 self.api_client.num_retries = self.num_retries |
212 | 240 |
213 if no_op_credentials: | 241 if no_op_credentials: |
214 # This API key is not secret and is used to identify gsutil during | 242 # This API key is not secret and is used to identify gsutil during |
215 # anonymous requests. | 243 # anonymous requests. |
216 self.api_client.AddGlobalParam('key', | 244 self.api_client.AddGlobalParam('key', |
217 u'AIzaSyDnacJHrKma0048b13sh8cgxNUwulubmJM') | 245 u'AIzaSyDnacJHrKma0048b13sh8cgxNUwulubmJM') |
218 | 246 |
219 def _CheckAndGetCredentials(self, logger): | 247 def _CheckAndGetCredentials(self, logger): |
220 configured_cred_types = [] | 248 configured_cred_types = [] |
(...skipping 81 matching lines...) |
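Note on the constructor hunk above: the new trace_token argument, when set, is turned into a default StandardQueryParameters value so that every request issued by the client carries the token. A minimal usage sketch, assuming direct construction (gsutil normally builds this object through its delegator layer, and the token value here is made up):

    import logging

    # bucket_storage_uri_class is documented as unused, so None is fine here.
    api = GcsJsonApi(None, logging.getLogger(), trace_token='abc123')
    # Every call issued through api.api_client now includes the default
    # global parameter trace='token:abc123'.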
302 raise | 330 raise |
303 | 331 |
304 def _GetDevshellCreds(self): | 332 def _GetDevshellCreds(self): |
305 try: | 333 try: |
306 return devshell.DevshellCredentials() | 334 return devshell.DevshellCredentials() |
307 except devshell.NoDevshellServer: | 335 except devshell.NoDevshellServer: |
308 return None | 336 return None |
309 except: | 337 except: |
310 raise | 338 raise |
311 | 339 |
312 def _GetNewDownloadHttp(self, download_stream): | 340 def _GetCredentialStoreKeyDict(self, credentials): |
313 return GetNewHttp(http_class=HttpWithDownloadStream, stream=download_stream) | 341 """Disambiguates a credential for caching in a credential store. |
| 342 |
| 343 Different credential types have different fields that identify them. |
| 344 This function assembles relevant information in a dict and returns it. |
| 345 |
| 346 Args: |
| 347 credentials: An OAuth2Credentials object. |
| 348 |
| 349 Returns: |
| 350 Dict of relevant identifiers for credentials. |
| 351 """ |
| 352 # TODO: If scopes ever become available in the credentials themselves, |
| 353 # include them in the key dict. |
| 354 key_dict = {'api_version': self.api_version} |
| 355 # pylint: disable=protected-access |
| 356 if isinstance(credentials, devshell.DevshellCredentials): |
| 357 key_dict['user_email'] = credentials.user_email |
| 358 elif isinstance(credentials, |
| 359 oauth2client.service_account._ServiceAccountCredentials): |
| 360 key_dict['_service_account_email'] = credentials._service_account_email |
| 361 elif isinstance(credentials, |
| 362 oauth2client.client.SignedJwtAssertionCredentials): |
| 363 key_dict['service_account_name'] = credentials.service_account_name |
| 364 elif isinstance(credentials, oauth2client.client.OAuth2Credentials): |
| 365 if credentials.client_id and credentials.client_id != 'null': |
| 366 key_dict['client_id'] = credentials.client_id |
| 367 key_dict['refresh_token'] = credentials.refresh_token |
| 368 # pylint: enable=protected-access |
| 369 |
| 370 return key_dict |
| 371 |
| 372 def _GetNewDownloadHttp(self): |
| 373 return GetNewHttp(http_class=HttpWithDownloadStream) |
314 | 374 |
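The credential-cache hunk above replaces the old api_version-only store key with a dict built by _GetCredentialStoreKeyDict and passed to multistore_file.get_credential_storage_custom_key. A condensed standalone sketch of the same pattern for a plain OAuth2Credentials object; the helper name and the exact fields chosen are illustrative, not part of the change:

    from oauth2client import multistore_file

    def attach_keyed_store(credentials, store_filename, api_version):
      # Always key on api_version, then on whatever identifies the credential.
      key_dict = {'api_version': api_version}
      if getattr(credentials, 'client_id', None) and credentials.client_id != 'null':
        key_dict['client_id'] = credentials.client_id
      key_dict['refresh_token'] = getattr(credentials, 'refresh_token', None)
      credentials.set_store(
          multistore_file.get_credential_storage_custom_key(
              store_filename, key_dict))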
315 def _GetNewUploadHttp(self): | 375 def _GetNewUploadHttp(self): |
316 """Returns an upload-safe Http object (by disabling httplib2 retries).""" | 376 """Returns an upload-safe Http object (by disabling httplib2 retries).""" |
317 return GetNewHttp(http_class=HttpWithNoRetries) | 377 return GetNewHttp(http_class=HttpWithNoRetries) |
318 | 378 |
319 def GetBucket(self, bucket_name, provider=None, fields=None): | 379 def GetBucket(self, bucket_name, provider=None, fields=None): |
320 """See CloudApi class for function doc strings.""" | 380 """See CloudApi class for function doc strings.""" |
321 projection = (apitools_messages.StorageBucketsGetRequest | 381 projection = (apitools_messages.StorageBucketsGetRequest |
322 .ProjectionValueValuesEnum.full) | 382 .ProjectionValueValuesEnum.full) |
323 apitools_request = apitools_messages.StorageBucketsGetRequest( | 383 apitools_request = apitools_messages.StorageBucketsGetRequest( |
(...skipping 29 matching lines...) |
353 'website'): | 413 'website'): |
354 attr = getattr(bucket_metadata, metadata_field, None) | 414 attr = getattr(bucket_metadata, metadata_field, None) |
355 if attr and not encoding.MessageToDict(attr): | 415 if attr and not encoding.MessageToDict(attr): |
356 setattr(bucket_metadata, metadata_field, None) | 416 setattr(bucket_metadata, metadata_field, None) |
357 apitools_include_fields.append(metadata_field) | 417 apitools_include_fields.append(metadata_field) |
358 | 418 |
359 if bucket_metadata.cors and bucket_metadata.cors == REMOVE_CORS_CONFIG: | 419 if bucket_metadata.cors and bucket_metadata.cors == REMOVE_CORS_CONFIG: |
360 bucket_metadata.cors = [] | 420 bucket_metadata.cors = [] |
361 apitools_include_fields.append('cors') | 421 apitools_include_fields.append('cors') |
362 | 422 |
| 423 if (bucket_metadata.defaultObjectAcl and |
| 424 bucket_metadata.defaultObjectAcl[0] == PRIVATE_DEFAULT_OBJ_ACL): |
| 425 bucket_metadata.defaultObjectAcl = [] |
| 426 apitools_include_fields.append('defaultObjectAcl') |
| 427 |
363 predefined_acl = None | 428 predefined_acl = None |
364 if canned_acl: | 429 if canned_acl: |
365 # Must null out existing ACLs to apply a canned ACL. | 430 # Must null out existing ACLs to apply a canned ACL. |
366 apitools_include_fields.append('acl') | 431 apitools_include_fields.append('acl') |
367 predefined_acl = ( | 432 predefined_acl = ( |
368 apitools_messages.StorageBucketsPatchRequest. | 433 apitools_messages.StorageBucketsPatchRequest. |
369 PredefinedAclValueValuesEnum( | 434 PredefinedAclValueValuesEnum( |
370 self._BucketCannedAclToPredefinedAcl(canned_acl))) | 435 self._BucketCannedAclToPredefinedAcl(canned_acl))) |
371 | 436 |
372 predefined_def_acl = None | 437 predefined_def_acl = None |
(...skipping 148 matching lines...) |
521 try: | 586 try: |
522 object_list = self.api_client.objects.List(apitools_request, | 587 object_list = self.api_client.objects.List(apitools_request, |
523 global_params=global_params) | 588 global_params=global_params) |
524 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: | 589 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
525 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) | 590 self._TranslateExceptionAndRaise(e, bucket_name=bucket_name) |
526 | 591 |
527 for object_or_prefix in self._YieldObjectsAndPrefixes(object_list): | 592 for object_or_prefix in self._YieldObjectsAndPrefixes(object_list): |
528 yield object_or_prefix | 593 yield object_or_prefix |
529 | 594 |
530 def _YieldObjectsAndPrefixes(self, object_list): | 595 def _YieldObjectsAndPrefixes(self, object_list): |
| 596 # Yield prefixes first so that checking for the presence of a subdirectory |
| 597 # is fast. |
| 598 if object_list.prefixes: |
| 599 for prefix in object_list.prefixes: |
| 600 yield CloudApi.CsObjectOrPrefix(prefix, |
| 601 CloudApi.CsObjectOrPrefixType.PREFIX) |
531 if object_list.items: | 602 if object_list.items: |
532 for cloud_obj in object_list.items: | 603 for cloud_obj in object_list.items: |
533 yield CloudApi.CsObjectOrPrefix(cloud_obj, | 604 yield CloudApi.CsObjectOrPrefix(cloud_obj, |
534 CloudApi.CsObjectOrPrefixType.OBJECT) | 605 CloudApi.CsObjectOrPrefixType.OBJECT) |
535 if object_list.prefixes: | |
536 for prefix in object_list.prefixes: | |
537 yield CloudApi.CsObjectOrPrefix(prefix, | |
538 CloudApi.CsObjectOrPrefixType.PREFIX) | |
539 | 606 |
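Yielding prefixes ahead of objects in _YieldObjectsAndPrefixes lets a caller that only needs to know whether a "subdirectory" exists stop as early as the listing allows. A sketch of that consumer pattern, assuming the CsObjectOrPrefix tuple exposes a datatype field as it is used above:

    def contains_prefix(gcs_json_api, bucket_name, prefix):
      # Prefixes come first within each page of results, so a match surfaces
      # before any object entries from the same page are drained.
      for item in gcs_json_api.ListObjects(bucket_name, prefix=prefix,
                                           delimiter='/'):
        if item.datatype == CloudApi.CsObjectOrPrefixType.PREFIX:
          return True
      return False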
540 def GetObjectMetadata(self, bucket_name, object_name, generation=None, | 607 def GetObjectMetadata(self, bucket_name, object_name, generation=None, |
541 provider=None, fields=None): | 608 provider=None, fields=None): |
542 """See CloudApi class for function doc strings.""" | 609 """See CloudApi class for function doc strings.""" |
543 projection = (apitools_messages.StorageObjectsGetRequest | 610 projection = (apitools_messages.StorageObjectsGetRequest |
544 .ProjectionValueValuesEnum.full) | 611 .ProjectionValueValuesEnum.full) |
545 | 612 |
546 if generation: | 613 if generation: |
547 generation = long(generation) | 614 generation = long(generation) |
548 | 615 |
(...skipping 17 matching lines...) |
566 provider=None, generation=None, object_size=None, | 633 provider=None, generation=None, object_size=None, |
567 download_strategy=CloudApi.DownloadStrategy.ONE_SHOT, start_byte=0, | 634 download_strategy=CloudApi.DownloadStrategy.ONE_SHOT, start_byte=0, |
568 end_byte=None, progress_callback=None, serialization_data=None, | 635 end_byte=None, progress_callback=None, serialization_data=None, |
569 digesters=None): | 636 digesters=None): |
570 """See CloudApi class for function doc strings.""" | 637 """See CloudApi class for function doc strings.""" |
571 # This implementation will get the object metadata first if we don't pass it | 638 # This implementation will get the object metadata first if we don't pass it |
572 # in via serialization_data. | 639 # in via serialization_data. |
573 if generation: | 640 if generation: |
574 generation = long(generation) | 641 generation = long(generation) |
575 | 642 |
| 643 # 'outer_total_size' is only used for formatting user output, and is |
| 644 # expected to be one higher than the last byte that should be downloaded. |
| 645 # TODO: Change DownloadCallbackConnectionClassFactory and progress callbacks |
| 646 # to more elegantly handle total size for components of files. |
576 outer_total_size = object_size | 647 outer_total_size = object_size |
577 if serialization_data: | 648 if end_byte: |
| 649 outer_total_size = end_byte + 1 |
| 650 elif serialization_data: |
578 outer_total_size = json.loads(serialization_data)['total_size'] | 651 outer_total_size = json.loads(serialization_data)['total_size'] |
579 | 652 |
580 if progress_callback: | 653 if progress_callback: |
581 if outer_total_size is None: | 654 if outer_total_size is None: |
582 raise ArgumentException('Download size is required when callbacks are ' | 655 raise ArgumentException('Download size is required when callbacks are ' |
583 'requested for a download, but no size was ' | 656 'requested for a download, but no size was ' |
584 'provided.') | 657 'provided.') |
585 progress_callback(0, outer_total_size) | 658 progress_callback(start_byte, outer_total_size) |
586 | 659 |
587 bytes_downloaded_container = BytesTransferredContainer() | 660 bytes_downloaded_container = BytesTransferredContainer() |
588 bytes_downloaded_container.bytes_transferred = start_byte | 661 bytes_downloaded_container.bytes_transferred = start_byte |
589 | 662 |
590 callback_class_factory = DownloadCallbackConnectionClassFactory( | 663 callback_class_factory = DownloadCallbackConnectionClassFactory( |
591 bytes_downloaded_container, total_size=outer_total_size, | 664 bytes_downloaded_container, total_size=outer_total_size, |
592 progress_callback=progress_callback, digesters=digesters) | 665 progress_callback=progress_callback, digesters=digesters) |
593 download_http_class = callback_class_factory.GetConnectionClass() | 666 download_http_class = callback_class_factory.GetConnectionClass() |
594 | 667 |
595 download_http = self._GetNewDownloadHttp(download_stream) | 668 # Point our download HTTP at our download stream. |
596 download_http.connections = {'https': download_http_class} | 669 self.download_http.stream = download_stream |
597 authorized_download_http = self.credentials.authorize(download_http) | 670 self.download_http.connections = {'https': download_http_class} |
598 WrapDownloadHttpRequest(authorized_download_http) | |
599 | 671 |
600 if serialization_data: | 672 if serialization_data: |
601 apitools_download = apitools_transfer.Download.FromData( | 673 apitools_download = apitools_transfer.Download.FromData( |
602 download_stream, serialization_data, self.api_client.http, | 674 download_stream, serialization_data, self.api_client.http, |
603 num_retries=self.num_retries) | 675 num_retries=self.num_retries) |
604 else: | 676 else: |
605 apitools_download = apitools_transfer.Download.FromStream( | 677 apitools_download = apitools_transfer.Download.FromStream( |
606 download_stream, auto_transfer=False, total_size=object_size, | 678 download_stream, auto_transfer=False, total_size=object_size, |
607 num_retries=self.num_retries) | 679 num_retries=self.num_retries) |
608 | 680 |
609 apitools_download.bytes_http = authorized_download_http | 681 apitools_download.bytes_http = self.authorized_download_http |
610 apitools_request = apitools_messages.StorageObjectsGetRequest( | 682 apitools_request = apitools_messages.StorageObjectsGetRequest( |
611 bucket=bucket_name, object=object_name, generation=generation) | 683 bucket=bucket_name, object=object_name, generation=generation) |
612 | 684 |
613 try: | 685 try: |
614 if download_strategy == CloudApi.DownloadStrategy.RESUMABLE: | 686 if download_strategy == CloudApi.DownloadStrategy.RESUMABLE: |
615 # Disable retries in apitools. We will handle them explicitly here. | 687 # Disable retries in apitools. We will handle them explicitly here. |
616 apitools_download.retry_func = ( | 688 apitools_download.retry_func = ( |
617 apitools_http_wrapper.RethrowExceptionHandler) | 689 apitools_http_wrapper.RethrowExceptionHandler) |
618 return self._PerformResumableDownload( | 690 return self._PerformResumableDownload( |
619 bucket_name, object_name, download_stream, apitools_request, | 691 bucket_name, object_name, download_stream, apitools_request, |
(...skipping 27 matching lines...) |
647 bytes_downloaded_container.bytes_transferred = start_byte | 719 bytes_downloaded_container.bytes_transferred = start_byte |
648 if start_byte > last_progress_byte: | 720 if start_byte > last_progress_byte: |
649 # We've made progress, so allow a fresh set of retries. | 721 # We've made progress, so allow a fresh set of retries. |
650 last_progress_byte = start_byte | 722 last_progress_byte = start_byte |
651 retries = 0 | 723 retries = 0 |
652 retries += 1 | 724 retries += 1 |
653 if retries > self.num_retries: | 725 if retries > self.num_retries: |
654 raise ResumableDownloadException( | 726 raise ResumableDownloadException( |
655 'Transfer failed after %d retries. Final exception: %s' % | 727 'Transfer failed after %d retries. Final exception: %s' % |
656 (self.num_retries, unicode(e).encode(UTF8))) | 728 (self.num_retries, unicode(e).encode(UTF8))) |
657 time.sleep(CalculateWaitForRetry(retries, max_wait=GetMaxRetryDelay())) | 729 time.sleep(CalculateWaitForRetry(retries, max_wait=self.max_retry_wait)) |
658 if self.logger.isEnabledFor(logging.DEBUG): | 730 if self.logger.isEnabledFor(logging.DEBUG): |
659 self.logger.debug( | 731 self.logger.debug( |
660 'Retrying download from byte %s after exception: %s. Trace: %s', | 732 'Retrying download from byte %s after exception: %s. Trace: %s', |
661 start_byte, unicode(e).encode(UTF8), traceback.format_exc()) | 733 start_byte, unicode(e).encode(UTF8), traceback.format_exc()) |
662 apitools_http_wrapper.RebuildHttpConnections( | 734 apitools_http_wrapper.RebuildHttpConnections( |
663 apitools_download.bytes_http) | 735 apitools_download.bytes_http) |
664 | 736 |
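The resumable-download loop above only charges an attempt against the retry budget when no forward progress has been made since the last failure, and sleeps a bounded interval between attempts. A generic sketch of that policy; the backoff formula here is an assumption standing in for apitools' CalculateWaitForRetry:

    import random
    import time

    def retry_with_progress(attempt, get_progress, num_retries, max_retry_wait):
      retries = 0
      last_progress = get_progress()
      while True:
        try:
          return attempt()
        except Exception:
          progress = get_progress()
          if progress > last_progress:
            # Forward progress was made, so allow a fresh set of retries.
            last_progress = progress
            retries = 0
          retries += 1
          if retries > num_retries:
            raise
          # Bounded exponential backoff with jitter.
          time.sleep(min(max_retry_wait, random.uniform(0, 2 ** retries)))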
665 def _PerformDownload( | 737 def _PerformDownload( |
666 self, bucket_name, object_name, download_stream, apitools_request, | 738 self, bucket_name, object_name, download_stream, apitools_request, |
667 apitools_download, generation=None, start_byte=0, end_byte=None, | 739 apitools_download, generation=None, start_byte=0, end_byte=None, |
(...skipping 19 matching lines...) |
687 # the bytes that we have locally and send a range request without | 759 # the bytes that we have locally and send a range request without |
688 # accept-encoding:gzip so that we can download only the (uncompressed) bytes | 760 # accept-encoding:gzip so that we can download only the (uncompressed) bytes |
689 # that we don't yet have. | 761 # that we don't yet have. |
690 | 762 |
691 # Since bytes_http is created in this function, we don't get the | 763 # Since bytes_http is created in this function, we don't get the |
692 # user-agent header from api_client's http automatically. | 764 # user-agent header from api_client's http automatically. |
693 additional_headers = { | 765 additional_headers = { |
694 'accept-encoding': 'gzip', | 766 'accept-encoding': 'gzip', |
695 'user-agent': self.api_client.user_agent | 767 'user-agent': self.api_client.user_agent |
696 } | 768 } |
697 if start_byte or end_byte: | 769 if start_byte or end_byte is not None: |
698 apitools_download.GetRange(additional_headers=additional_headers, | 770 apitools_download.GetRange(additional_headers=additional_headers, |
699 start=start_byte, end=end_byte, | 771 start=start_byte, end=end_byte, |
700 use_chunks=False) | 772 use_chunks=False) |
701 else: | 773 else: |
702 apitools_download.StreamMedia( | 774 apitools_download.StreamMedia( |
703 callback=_NoOpCallback, finish_callback=_NoOpCallback, | 775 callback=_NoOpCallback, finish_callback=_NoOpCallback, |
704 additional_headers=additional_headers, use_chunks=False) | 776 additional_headers=additional_headers, use_chunks=False) |
705 return apitools_download.encoding | 777 return apitools_download.encoding |
706 | 778 |
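The range check above changes from "start_byte or end_byte" to "start_byte or end_byte is not None" because a caller asking for only the first byte passes end_byte=0, which is falsy. A tiny illustration of the corrected predicate:

    def needs_range_request(start_byte, end_byte):
      # end_byte=0 ("just the first byte") must still trigger a Range request,
      # so compare against None instead of relying on truthiness.
      return bool(start_byte) or end_byte is not None

    assert needs_range_request(0, 0)         # first byte only
    assert needs_range_request(5, None)      # resume from byte 5
    assert not needs_range_request(0, None)  # full-object streaming download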
707 def PatchObjectMetadata(self, bucket_name, object_name, metadata, | 779 def PatchObjectMetadata(self, bucket_name, object_name, metadata, |
(...skipping 66 matching lines...) |
774 bytes_uploaded_container = BytesTransferredContainer() | 846 bytes_uploaded_container = BytesTransferredContainer() |
775 | 847 |
776 if progress_callback and size: | 848 if progress_callback and size: |
777 total_size = size | 849 total_size = size |
778 progress_callback(0, size) | 850 progress_callback(0, size) |
779 | 851 |
780 callback_class_factory = UploadCallbackConnectionClassFactory( | 852 callback_class_factory = UploadCallbackConnectionClassFactory( |
781 bytes_uploaded_container, total_size=total_size, | 853 bytes_uploaded_container, total_size=total_size, |
782 progress_callback=progress_callback) | 854 progress_callback=progress_callback) |
783 | 855 |
784 upload_http = self._GetNewUploadHttp() | |
785 upload_http_class = callback_class_factory.GetConnectionClass() | 856 upload_http_class = callback_class_factory.GetConnectionClass() |
786 upload_http.connections = {'http': upload_http_class, | 857 self.upload_http.connections = {'http': upload_http_class, |
787 'https': upload_http_class} | 858 'https': upload_http_class} |
788 | 859 |
789 authorized_upload_http = self.credentials.authorize(upload_http) | |
790 WrapUploadHttpRequest(authorized_upload_http) | |
791 # Since bytes_http is created in this function, we don't get the | 860 # Since bytes_http is created in this function, we don't get the |
792 # user-agent header from api_client's http automatically. | 861 # user-agent header from api_client's http automatically. |
793 additional_headers = { | 862 additional_headers = { |
794 'user-agent': self.api_client.user_agent | 863 'user-agent': self.api_client.user_agent |
795 } | 864 } |
796 | 865 |
797 try: | 866 try: |
798 content_type = None | 867 content_type = None |
799 apitools_request = None | 868 apitools_request = None |
800 global_params = None | 869 global_params = None |
(...skipping 14 matching lines...) |
815 global_params = apitools_messages.StandardQueryParameters() | 884 global_params = apitools_messages.StandardQueryParameters() |
816 if fields: | 885 if fields: |
817 global_params.fields = ','.join(set(fields)) | 886 global_params.fields = ','.join(set(fields)) |
818 | 887 |
819 if apitools_strategy == apitools_transfer.SIMPLE_UPLOAD: | 888 if apitools_strategy == apitools_transfer.SIMPLE_UPLOAD: |
820 # One-shot upload. | 889 # One-shot upload. |
821 apitools_upload = apitools_transfer.Upload( | 890 apitools_upload = apitools_transfer.Upload( |
822 upload_stream, content_type, total_size=size, auto_transfer=True, | 891 upload_stream, content_type, total_size=size, auto_transfer=True, |
823 num_retries=self.num_retries) | 892 num_retries=self.num_retries) |
824 apitools_upload.strategy = apitools_strategy | 893 apitools_upload.strategy = apitools_strategy |
825 apitools_upload.bytes_http = authorized_upload_http | 894 apitools_upload.bytes_http = self.authorized_upload_http |
826 | 895 |
827 return self.api_client.objects.Insert( | 896 return self.api_client.objects.Insert( |
828 apitools_request, | 897 apitools_request, |
829 upload=apitools_upload, | 898 upload=apitools_upload, |
830 global_params=global_params) | 899 global_params=global_params) |
831 else: # Resumable upload. | 900 else: # Resumable upload. |
832 return self._PerformResumableUpload( | 901 return self._PerformResumableUpload( |
833 upload_stream, authorized_upload_http, content_type, size, | 902 upload_stream, self.authorized_upload_http, content_type, size, |
834 serialization_data, apitools_strategy, apitools_request, | 903 serialization_data, apitools_strategy, apitools_request, |
835 global_params, bytes_uploaded_container, tracker_callback, | 904 global_params, bytes_uploaded_container, tracker_callback, |
836 additional_headers, progress_callback) | 905 additional_headers, progress_callback) |
837 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: | 906 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 907 not_found_exception = CreateNotFoundExceptionForObjectWrite( |
| 908 self.provider, object_metadata.bucket) |
838 self._TranslateExceptionAndRaise(e, bucket_name=object_metadata.bucket, | 909 self._TranslateExceptionAndRaise(e, bucket_name=object_metadata.bucket, |
839 object_name=object_metadata.name) | 910 object_name=object_metadata.name, |
| 911 not_found_exception=not_found_exception) |
840 | 912 |
841 def _PerformResumableUpload( | 913 def _PerformResumableUpload( |
842 self, upload_stream, authorized_upload_http, content_type, size, | 914 self, upload_stream, authorized_upload_http, content_type, size, |
843 serialization_data, apitools_strategy, apitools_request, global_params, | 915 serialization_data, apitools_strategy, apitools_request, global_params, |
844 bytes_uploaded_container, tracker_callback, addl_headers, | 916 bytes_uploaded_container, tracker_callback, addl_headers, |
845 progress_callback): | 917 progress_callback): |
846 try: | 918 try: |
847 if serialization_data: | 919 if serialization_data: |
848 # Resuming an existing upload. | 920 # Resuming an existing upload. |
849 apitools_upload = apitools_transfer.Upload.FromData( | 921 apitools_upload = apitools_transfer.Upload.FromData( |
(...skipping 72 matching lines...) |
922 break | 994 break |
923 except HTTP_TRANSFER_EXCEPTIONS, e2: | 995 except HTTP_TRANSFER_EXCEPTIONS, e2: |
924 apitools_http_wrapper.RebuildHttpConnections( | 996 apitools_http_wrapper.RebuildHttpConnections( |
925 apitools_upload.bytes_http) | 997 apitools_upload.bytes_http) |
926 retries += 1 | 998 retries += 1 |
927 if retries > self.num_retries: | 999 if retries > self.num_retries: |
928 raise ResumableUploadException( | 1000 raise ResumableUploadException( |
929 'Transfer failed after %d retries. Final exception: %s' % | 1001 'Transfer failed after %d retries. Final exception: %s' % |
930 (self.num_retries, e2)) | 1002 (self.num_retries, e2)) |
931 time.sleep( | 1003 time.sleep( |
932 CalculateWaitForRetry(retries, max_wait=GetMaxRetryDelay())) | 1004 CalculateWaitForRetry(retries, max_wait=self.max_retry_wait)) |
933 if start_byte > last_progress_byte: | 1005 if start_byte > last_progress_byte: |
934 # We've made progress, so allow a fresh set of retries. | 1006 # We've made progress, so allow a fresh set of retries. |
935 last_progress_byte = start_byte | 1007 last_progress_byte = start_byte |
936 retries = 0 | 1008 retries = 0 |
937 else: | 1009 else: |
938 retries += 1 | 1010 retries += 1 |
939 if retries > self.num_retries: | 1011 if retries > self.num_retries: |
940 raise ResumableUploadException( | 1012 raise ResumableUploadException( |
941 'Transfer failed after %d retries. Final exception: %s' % | 1013 'Transfer failed after %d retries. Final exception: %s' % |
942 (self.num_retries, unicode(e).encode(UTF8))) | 1014 (self.num_retries, unicode(e).encode(UTF8))) |
943 time.sleep( | 1015 time.sleep( |
944 CalculateWaitForRetry(retries, max_wait=GetMaxRetryDelay())) | 1016 CalculateWaitForRetry(retries, max_wait=self.max_retry_wait)) |
945 if self.logger.isEnabledFor(logging.DEBUG): | 1017 if self.logger.isEnabledFor(logging.DEBUG): |
946 self.logger.debug( | 1018 self.logger.debug( |
947 'Retrying upload from byte %s after exception: %s. Trace: %s', | 1019 'Retrying upload from byte %s after exception: %s. Trace: %s', |
948 start_byte, unicode(e).encode(UTF8), traceback.format_exc()) | 1020 start_byte, unicode(e).encode(UTF8), traceback.format_exc()) |
949 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: | 1021 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
950 resumable_ex = self._TranslateApitoolsResumableUploadException(e) | 1022 resumable_ex = self._TranslateApitoolsResumableUploadException(e) |
951 if resumable_ex: | 1023 if resumable_ex: |
952 raise resumable_ex | 1024 raise resumable_ex |
953 else: | 1025 else: |
954 raise | 1026 raise |
(...skipping 107 matching lines...) |
1062 elif not resume_rewrite_token: | 1134 elif not resume_rewrite_token: |
1063 # Save the token and make a tracker file if they don't already exist. | 1135 # Save the token and make a tracker file if they don't already exist. |
1064 resume_rewrite_token = rewrite_response.rewriteToken | 1136 resume_rewrite_token = rewrite_response.rewriteToken |
1065 WriteRewriteTrackerFile(tracker_file_name, rewrite_params_hash, | 1137 WriteRewriteTrackerFile(tracker_file_name, rewrite_params_hash, |
1066 rewrite_response.rewriteToken) | 1138 rewrite_response.rewriteToken) |
1067 last_bytes_written = bytes_written | 1139 last_bytes_written = bytes_written |
1068 | 1140 |
1069 DeleteTrackerFile(tracker_file_name) | 1141 DeleteTrackerFile(tracker_file_name) |
1070 return rewrite_response.resource | 1142 return rewrite_response.resource |
1071 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: | 1143 except TRANSLATABLE_APITOOLS_EXCEPTIONS, e: |
| 1144 not_found_exception = CreateNotFoundExceptionForObjectWrite( |
| 1145 self.provider, dst_obj_metadata.bucket, src_provider=self.provider, |
| 1146 src_bucket_name=src_obj_metadata.bucket, |
| 1147 src_object_name=src_obj_metadata.name, src_generation=src_generation) |
1072 self._TranslateExceptionAndRaise(e, bucket_name=dst_obj_metadata.bucket, | 1148 self._TranslateExceptionAndRaise(e, bucket_name=dst_obj_metadata.bucket, |
1073 object_name=dst_obj_metadata.name) | 1149 object_name=dst_obj_metadata.name, |
| 1150 not_found_exception=not_found_exception) |
1074 | 1151 |
1075 def DeleteObject(self, bucket_name, object_name, preconditions=None, | 1152 def DeleteObject(self, bucket_name, object_name, preconditions=None, |
1076 generation=None, provider=None): | 1153 generation=None, provider=None): |
1077 """See CloudApi class for function doc strings.""" | 1154 """See CloudApi class for function doc strings.""" |
1078 if not preconditions: | 1155 if not preconditions: |
1079 preconditions = Preconditions() | 1156 preconditions = Preconditions() |
1080 | 1157 |
1081 if generation: | 1158 if generation: |
1082 generation = long(generation) | 1159 generation = long(generation) |
1083 | 1160 |
(...skipping 119 matching lines...) |
1203 'bucket-owner-full-control': 'bucketOwnerFullControl', | 1280 'bucket-owner-full-control': 'bucketOwnerFullControl', |
1204 'private': 'private', | 1281 'private': 'private', |
1205 'project-private': 'projectPrivate', | 1282 'project-private': 'projectPrivate', |
1206 'public-read': 'publicRead' | 1283 'public-read': 'publicRead' |
1207 } | 1284 } |
1208 if canned_acl_string in translation_dict: | 1285 if canned_acl_string in translation_dict: |
1209 return translation_dict[canned_acl_string] | 1286 return translation_dict[canned_acl_string] |
1210 raise ArgumentException('Invalid canned ACL %s' % canned_acl_string) | 1287 raise ArgumentException('Invalid canned ACL %s' % canned_acl_string) |
1211 | 1288 |
1212 def _TranslateExceptionAndRaise(self, e, bucket_name=None, object_name=None, | 1289 def _TranslateExceptionAndRaise(self, e, bucket_name=None, object_name=None, |
1213 generation=None): | 1290 generation=None, not_found_exception=None): |
1214 """Translates an HTTP exception and raises the translated or original value. | 1291 """Translates an HTTP exception and raises the translated or original value. |
1215 | 1292 |
1216 Args: | 1293 Args: |
1217 e: Any Exception. | 1294 e: Any Exception. |
1218 bucket_name: Optional bucket name in request that caused the exception. | 1295 bucket_name: Optional bucket name in request that caused the exception. |
1219 object_name: Optional object name in request that caused the exception. | 1296 object_name: Optional object name in request that caused the exception. |
1220 generation: Optional generation in request that caused the exception. | 1297 generation: Optional generation in request that caused the exception. |
| 1298 not_found_exception: Optional exception to raise in the not-found case. |
1221 | 1299 |
1222 Raises: | 1300 Raises: |
1223 Translated CloudApi exception, or the original exception if it was not | 1301 Translated CloudApi exception, or the original exception if it was not |
1224 translatable. | 1302 translatable. |
1225 """ | 1303 """ |
1226 translated_exception = self._TranslateApitoolsException( | 1304 translated_exception = self._TranslateApitoolsException( |
1227 e, bucket_name=bucket_name, object_name=object_name, | 1305 e, bucket_name=bucket_name, object_name=object_name, |
1228 generation=generation) | 1306 generation=generation, not_found_exception=not_found_exception) |
1229 if translated_exception: | 1307 if translated_exception: |
1230 raise translated_exception | 1308 raise translated_exception |
1231 else: | 1309 else: |
1232 raise | 1310 raise |
1233 | 1311 |
1234 def _GetMessageFromHttpError(self, http_error): | 1312 def _GetMessageFromHttpError(self, http_error): |
1235 if isinstance(http_error, apitools_exceptions.HttpError): | 1313 if isinstance(http_error, apitools_exceptions.HttpError): |
1236 if getattr(http_error, 'content', None): | 1314 if getattr(http_error, 'content', None): |
1237 try: | 1315 try: |
1238 json_obj = json.loads(http_error.content) | 1316 json_obj = json.loads(http_error.content) |
1239 if 'error' in json_obj and 'message' in json_obj['error']: | 1317 if 'error' in json_obj and 'message' in json_obj['error']: |
1240 return json_obj['error']['message'] | 1318 return json_obj['error']['message'] |
1241 except Exception: # pylint: disable=broad-except | 1319 except Exception: # pylint: disable=broad-except |
1242 # If we couldn't decode anything, just leave the message as None. | 1320 # If we couldn't decode anything, just leave the message as None. |
1243 pass | 1321 pass |
1244 | 1322 |
1245 def _TranslateApitoolsResumableUploadException( | 1323 def _TranslateApitoolsResumableUploadException(self, e): |
1246 self, e, bucket_name=None, object_name=None, generation=None): | |
1247 if isinstance(e, apitools_exceptions.HttpError): | 1324 if isinstance(e, apitools_exceptions.HttpError): |
1248 message = self._GetMessageFromHttpError(e) | 1325 message = self._GetMessageFromHttpError(e) |
1249 if (e.status_code == 503 and | 1326 if (e.status_code == 503 and |
1250 self.http.disable_ssl_certificate_validation): | 1327 self.http.disable_ssl_certificate_validation): |
1251 return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE, | 1328 return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE, |
1252 status=e.status_code) | 1329 status=e.status_code) |
1253 elif e.status_code >= 500: | 1330 elif e.status_code >= 500: |
1254 return ResumableUploadException( | 1331 return ResumableUploadException( |
1255 message or 'Server Error', status=e.status_code) | 1332 message or 'Server Error', status=e.status_code) |
1256 elif e.status_code == 429: | 1333 elif e.status_code == 429: |
(...skipping 10 matching lines...) |
1267 message or 'Bad Request', status=e.status_code) | 1344 message or 'Bad Request', status=e.status_code) |
1268 if isinstance(e, apitools_exceptions.StreamExhausted): | 1345 if isinstance(e, apitools_exceptions.StreamExhausted): |
1269 return ResumableUploadAbortException(e.message) | 1346 return ResumableUploadAbortException(e.message) |
1270 if (isinstance(e, apitools_exceptions.TransferError) and | 1347 if (isinstance(e, apitools_exceptions.TransferError) and |
1271 ('Aborting transfer' in e.message or | 1348 ('Aborting transfer' in e.message or |
1272 'Not enough bytes in stream' in e.message or | 1349 'Not enough bytes in stream' in e.message or |
1273 'additional bytes left in stream' in e.message)): | 1350 'additional bytes left in stream' in e.message)): |
1274 return ResumableUploadAbortException(e.message) | 1351 return ResumableUploadAbortException(e.message) |
1275 | 1352 |
1276 def _TranslateApitoolsException(self, e, bucket_name=None, object_name=None, | 1353 def _TranslateApitoolsException(self, e, bucket_name=None, object_name=None, |
1277 generation=None): | 1354 generation=None, not_found_exception=None): |
1278 """Translates apitools exceptions into their gsutil Cloud Api equivalents. | 1355 """Translates apitools exceptions into their gsutil Cloud Api equivalents. |
1279 | 1356 |
1280 Args: | 1357 Args: |
1281 e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS. | 1358 e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS. |
1282 bucket_name: Optional bucket name in request that caused the exception. | 1359 bucket_name: Optional bucket name in request that caused the exception. |
1283 object_name: Optional object name in request that caused the exception. | 1360 object_name: Optional object name in request that caused the exception. |
1284 generation: Optional generation in request that caused the exception. | 1361 generation: Optional generation in request that caused the exception. |
| 1362 not_found_exception: Optional exception to raise in the not-found case. |
1285 | 1363 |
1286 Returns: | 1364 Returns: |
1287 CloudStorageApiServiceException for translatable exceptions, None | 1365 CloudStorageApiServiceException for translatable exceptions, None |
1288 otherwise. | 1366 otherwise. |
1289 """ | 1367 """ |
1290 if isinstance(e, apitools_exceptions.HttpError): | 1368 if isinstance(e, apitools_exceptions.HttpError): |
1291 message = self._GetMessageFromHttpError(e) | 1369 message = self._GetMessageFromHttpError(e) |
1292 if e.status_code == 400: | 1370 if e.status_code == 400: |
1293 # It is possible that the Project ID is incorrect. Unfortunately the | 1371 # It is possible that the Project ID is incorrect. Unfortunately the |
1294 # JSON API does not give us much information about what part of the | 1372 # JSON API does not give us much information about what part of the |
(...skipping 31 matching lines...) |
1326 return AccessDeniedException( | 1404 return AccessDeniedException( |
1327 'Access Not Configured. Please go to the Google Developers ' | 1405 'Access Not Configured. Please go to the Google Developers ' |
1328 'Console (https://cloud.google.com/console#/project) for your ' | 1406 'Console (https://cloud.google.com/console#/project) for your ' |
1329 'project, select APIs and Auth and enable the ' | 1407 'project, select APIs and Auth and enable the ' |
1330 'Google Cloud Storage JSON API.', | 1408 'Google Cloud Storage JSON API.', |
1331 status=e.status_code) | 1409 status=e.status_code) |
1332 else: | 1410 else: |
1333 return AccessDeniedException(message or e.message, | 1411 return AccessDeniedException(message or e.message, |
1334 status=e.status_code) | 1412 status=e.status_code) |
1335 elif e.status_code == 404: | 1413 elif e.status_code == 404: |
1336 if bucket_name: | 1414 if not_found_exception: |
| 1415 # The exception is pre-constructed prior to translation; the HTTP |
| 1416 # status code isn't available at that time. |
| 1417 setattr(not_found_exception, 'status', e.status_code) |
| 1418 return not_found_exception |
| 1419 elif bucket_name: |
1337 if object_name: | 1420 if object_name: |
1338 return CreateObjectNotFoundException(e.status_code, self.provider, | 1421 return CreateObjectNotFoundException(e.status_code, self.provider, |
1339 bucket_name, object_name, | 1422 bucket_name, object_name, |
1340 generation=generation) | 1423 generation=generation) |
1341 return CreateBucketNotFoundException(e.status_code, self.provider, | 1424 return CreateBucketNotFoundException(e.status_code, self.provider, |
1342 bucket_name) | 1425 bucket_name) |
1343 return NotFoundException(e.message, status=e.status_code) | 1426 return NotFoundException(e.message, status=e.status_code) |
| 1427 |
1344 elif e.status_code == 409 and bucket_name: | 1428 elif e.status_code == 409 and bucket_name: |
1345 if 'The bucket you tried to delete was not empty.' in str(e): | 1429 if 'The bucket you tried to delete was not empty.' in str(e): |
1346 return NotEmptyException('BucketNotEmpty (%s)' % bucket_name, | 1430 return NotEmptyException('BucketNotEmpty (%s)' % bucket_name, |
1347 status=e.status_code) | 1431 status=e.status_code) |
1348 return ServiceException( | 1432 return ServiceException( |
1349 'Bucket %s already exists.' % bucket_name, status=e.status_code) | 1433 'Bucket %s already exists.' % bucket_name, status=e.status_code) |
1350 elif e.status_code == 412: | 1434 elif e.status_code == 412: |
1351 return PreconditionException(message, status=e.status_code) | 1435 return PreconditionException(message, status=e.status_code) |
1352 elif (e.status_code == 503 and | 1436 elif (e.status_code == 503 and |
1353 not self.http.disable_ssl_certificate_validation): | 1437 not self.http.disable_ssl_certificate_validation): |
1354 return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE, | 1438 return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE, |
1355 status=e.status_code) | 1439 status=e.status_code) |
1356 return ServiceException(message, status=e.status_code) | 1440 return ServiceException(message, status=e.status_code) |
1357 elif isinstance(e, apitools_exceptions.TransferInvalidError): | 1441 elif isinstance(e, apitools_exceptions.TransferInvalidError): |
1358 return ServiceException('Transfer invalid (possible encoding error: %s)' | 1442 return ServiceException('Transfer invalid (possible encoding error: %s)' |
1359 % str(e)) | 1443 % str(e)) |