OLD | NEW |
1 # -*- coding: utf-8 -*- | 1 # -*- coding: utf-8 -*- |
2 # Copyright 2013 Google Inc. All Rights Reserved. | 2 # Copyright 2013 Google Inc. All Rights Reserved. |
3 # | 3 # |
4 # Licensed under the Apache License, Version 2.0 (the "License"); | 4 # Licensed under the Apache License, Version 2.0 (the "License"); |
5 # you may not use this file except in compliance with the License. | 5 # you may not use this file except in compliance with the License. |
6 # You may obtain a copy of the License at | 6 # You may obtain a copy of the License at |
7 # | 7 # |
8 # http://www.apache.org/licenses/LICENSE-2.0 | 8 # http://www.apache.org/licenses/LICENSE-2.0 |
9 # | 9 # |
10 # Unless required by applicable law or agreed to in writing, software | 10 # Unless required by applicable law or agreed to in writing, software |
(...skipping 15 matching lines...) |
26 import pkgutil | 26 import pkgutil |
27 import random | 27 import random |
28 import re | 28 import re |
29 import string | 29 import string |
30 import sys | 30 import sys |
31 | 31 |
32 from apitools.base.py import exceptions as apitools_exceptions | 32 from apitools.base.py import exceptions as apitools_exceptions |
33 import boto | 33 import boto |
34 from boto import storage_uri | 34 from boto import storage_uri |
35 from boto.exception import ResumableTransferDisposition | 35 from boto.exception import ResumableTransferDisposition |
36 from boto.exception import ResumableUploadException | |
37 from boto.exception import StorageResponseError | 36 from boto.exception import StorageResponseError |
38 from boto.storage_uri import BucketStorageUri | 37 from boto.storage_uri import BucketStorageUri |
| 38 import crcmod |
39 | 39 |
40 from gslib.cloud_api import ResumableDownloadException | 40 from gslib.cloud_api import ResumableDownloadException |
41 from gslib.cloud_api import ResumableUploadException | 41 from gslib.cloud_api import ResumableUploadException |
42 from gslib.cloud_api import ResumableUploadStartOverException | 42 from gslib.cloud_api import ResumableUploadStartOverException |
| 43 from gslib.commands.config import DEFAULT_SLICED_OBJECT_DOWNLOAD_THRESHOLD |
43 from gslib.copy_helper import GetTrackerFilePath | 44 from gslib.copy_helper import GetTrackerFilePath |
44 from gslib.copy_helper import TrackerFileType | 45 from gslib.copy_helper import TrackerFileType |
45 from gslib.cs_api_map import ApiSelector | 46 from gslib.cs_api_map import ApiSelector |
46 from gslib.gcs_json_api import GcsJsonApi | 47 from gslib.gcs_json_api import GcsJsonApi |
47 from gslib.hashing_helper import CalculateMd5FromContents | 48 from gslib.hashing_helper import CalculateMd5FromContents |
48 from gslib.storage_url import StorageUrlFromString | 49 from gslib.storage_url import StorageUrlFromString |
49 import gslib.tests.testcase as testcase | 50 import gslib.tests.testcase as testcase |
50 from gslib.tests.testcase.base import NotParallelizable | 51 from gslib.tests.testcase.base import NotParallelizable |
51 from gslib.tests.testcase.integration_testcase import SkipForS3 | 52 from gslib.tests.testcase.integration_testcase import SkipForS3 |
52 from gslib.tests.util import GenerationFromURI as urigen | 53 from gslib.tests.util import GenerationFromURI as urigen |
53 from gslib.tests.util import HAS_S3_CREDS | 54 from gslib.tests.util import HAS_S3_CREDS |
54 from gslib.tests.util import ObjectToURI as suri | 55 from gslib.tests.util import ObjectToURI as suri |
55 from gslib.tests.util import PerformsFileToObjectUpload | 56 from gslib.tests.util import SequentialAndParallelTransfer |
56 from gslib.tests.util import SetBotoConfigForTest | 57 from gslib.tests.util import SetBotoConfigForTest |
57 from gslib.tests.util import unittest | 58 from gslib.tests.util import unittest |
58 from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages | 59 from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages |
59 from gslib.tracker_file import DeleteTrackerFile | 60 from gslib.tracker_file import DeleteTrackerFile |
60 from gslib.tracker_file import GetRewriteTrackerFilePath | 61 from gslib.tracker_file import GetRewriteTrackerFilePath |
| 62 from gslib.tracker_file import GetSlicedDownloadTrackerFilePaths |
61 from gslib.util import EIGHT_MIB | 63 from gslib.util import EIGHT_MIB |
| 64 from gslib.util import HumanReadableToBytes |
62 from gslib.util import IS_WINDOWS | 65 from gslib.util import IS_WINDOWS |
63 from gslib.util import MakeHumanReadable | 66 from gslib.util import MakeHumanReadable |
64 from gslib.util import ONE_KIB | 67 from gslib.util import ONE_KIB |
65 from gslib.util import ONE_MIB | 68 from gslib.util import ONE_MIB |
66 from gslib.util import Retry | 69 from gslib.util import Retry |
67 from gslib.util import START_CALLBACK_PER_BYTES | 70 from gslib.util import START_CALLBACK_PER_BYTES |
| 71 from gslib.util import UsingCrcmodExtension |
68 from gslib.util import UTF8 | 72 from gslib.util import UTF8 |
69 | 73 |
70 | 74 |
71 # Custom test callbacks must be pickleable, and therefore at global scope. | 75 # Custom test callbacks must be pickleable, and therefore at global scope. |
72 class _HaltingCopyCallbackHandler(object): | 76 class _HaltingCopyCallbackHandler(object): |
73 """Test callback handler for intentionally stopping a resumable transfer.""" | 77 """Test callback handler for intentionally stopping a resumable transfer.""" |
74 | 78 |
75 def __init__(self, is_upload, halt_at_byte): | 79 def __init__(self, is_upload, halt_at_byte): |
76 self._is_upload = is_upload | 80 self._is_upload = is_upload |
77 self._halt_at_byte = halt_at_byte | 81 self._halt_at_byte = halt_at_byte |
(...skipping 54 matching lines...) |
132 '%s/%s transferred.\r\n' % ( | 136 '%s/%s transferred.\r\n' % ( |
133 self._startover_at_byte, | 137 self._startover_at_byte, |
134 MakeHumanReadable(total_bytes_transferred), | 138 MakeHumanReadable(total_bytes_transferred), |
135 MakeHumanReadable(total_size))) | 139 MakeHumanReadable(total_size))) |
136 self.started_over_once = True | 140 self.started_over_once = True |
137 raise boto.exception.ResumableUploadException( | 141 raise boto.exception.ResumableUploadException( |
138 'Forcing upload start over', | 142 'Forcing upload start over', |
139 ResumableTransferDisposition.START_OVER) | 143 ResumableTransferDisposition.START_OVER) |
140 | 144 |
141 | 145 |
| 146 class _HaltOneComponentCopyCallbackHandler(object): |
| 147 """Test callback handler for stopping part of a sliced download.""" |
| 148 |
| 149 def __init__(self, halt_at_byte): |
| 150 self._last_progress_byte = None |
| 151 self._halt_at_byte = halt_at_byte |
| 152 |
| 153 # pylint: disable=invalid-name |
| 154 # pylint: disable=unused-argument |
| 155 def call(self, current_progress_byte, total_size_unused): |
| 156 """Forcibly exits if the transfer has passed the halting point since the last call.""" |
| 157 if (self._last_progress_byte is not None and |
| 158 self._last_progress_byte < self._halt_at_byte < current_progress_byte): |
| 159 sys.stderr.write('Halting transfer.\r\n') |
| 160 raise ResumableDownloadException('Artificially halting download.') |
| 161 self._last_progress_byte = current_progress_byte |
| 162 |
| 163 |
142 class _DeleteBucketThenStartOverCopyCallbackHandler(object): | 164 class _DeleteBucketThenStartOverCopyCallbackHandler(object): |
143 """Test callback handler that deletes bucket then raises start-over.""" | 165 """Test callback handler that deletes bucket then raises start-over.""" |
144 | 166 |
145 def __init__(self, startover_at_byte, bucket_uri): | 167 def __init__(self, startover_at_byte, bucket_uri): |
146 self._startover_at_byte = startover_at_byte | 168 self._startover_at_byte = startover_at_byte |
147 self._bucket_uri = bucket_uri | 169 self._bucket_uri = bucket_uri |
148 self.started_over_once = False | 170 self.started_over_once = False |
149 | 171 |
150 # pylint: disable=invalid-name | 172 # pylint: disable=invalid-name |
151 def call(self, total_bytes_transferred, total_size): | 173 def call(self, total_bytes_transferred, total_size): |
(...skipping 80 matching lines...) |
232 """Integration tests for cp command.""" | 254 """Integration tests for cp command.""" |
233 | 255 |
234 # For tests that artificially halt, we need to ensure at least one callback | 256 # For tests that artificially halt, we need to ensure at least one callback |
235 # occurs. | 257 # occurs. |
236 halt_size = START_CALLBACK_PER_BYTES * 2 | 258 halt_size = START_CALLBACK_PER_BYTES * 2 |
237 | 259 |
238 def _get_test_file(self, name): | 260 def _get_test_file(self, name): |
239 contents = pkgutil.get_data('gslib', 'tests/test_data/%s' % name) | 261 contents = pkgutil.get_data('gslib', 'tests/test_data/%s' % name) |
240 return self.CreateTempFile(file_name=name, contents=contents) | 262 return self.CreateTempFile(file_name=name, contents=contents) |
241 | 263 |
242 @PerformsFileToObjectUpload | 264 @SequentialAndParallelTransfer |
243 def test_noclobber(self): | 265 def test_noclobber(self): |
244 key_uri = self.CreateObject(contents='foo') | 266 key_uri = self.CreateObject(contents='foo') |
245 fpath = self.CreateTempFile(contents='bar') | 267 fpath = self.CreateTempFile(contents='bar') |
246 stderr = self.RunGsUtil(['cp', '-n', fpath, suri(key_uri)], | 268 stderr = self.RunGsUtil(['cp', '-n', fpath, suri(key_uri)], |
247 return_stderr=True) | 269 return_stderr=True) |
248 self.assertIn('Skipping existing item: %s' % suri(key_uri), stderr) | 270 self.assertIn('Skipping existing item: %s' % suri(key_uri), stderr) |
249 self.assertEqual(key_uri.get_contents_as_string(), 'foo') | 271 self.assertEqual(key_uri.get_contents_as_string(), 'foo') |
250 stderr = self.RunGsUtil(['cp', '-n', suri(key_uri), fpath], | 272 stderr = self.RunGsUtil(['cp', '-n', suri(key_uri), fpath], |
251 return_stderr=True) | 273 return_stderr=True) |
252 with open(fpath, 'r') as f: | 274 with open(fpath, 'r') as f: |
253 self.assertIn('Skipping existing item: %s' % suri(f), stderr) | 275 self.assertIn('Skipping existing item: %s' % suri(f), stderr) |
254 self.assertEqual(f.read(), 'bar') | 276 self.assertEqual(f.read(), 'bar') |
255 | 277 |
256 def test_dest_bucket_not_exist(self): | 278 def test_dest_bucket_not_exist(self): |
257 fpath = self.CreateTempFile(contents='foo') | 279 fpath = self.CreateTempFile(contents='foo') |
258 invalid_bucket_uri = ( | 280 invalid_bucket_uri = ( |
259 '%s://%s' % (self.default_provider, self.nonexistent_bucket_name)) | 281 '%s://%s' % (self.default_provider, self.nonexistent_bucket_name)) |
260 stderr = self.RunGsUtil(['cp', fpath, invalid_bucket_uri], | 282 stderr = self.RunGsUtil(['cp', fpath, invalid_bucket_uri], |
261 expected_status=1, return_stderr=True) | 283 expected_status=1, return_stderr=True) |
262 self.assertIn('does not exist.', stderr) | 284 self.assertIn('does not exist', stderr) |
263 | 285 |
264 def test_copy_in_cloud_noclobber(self): | 286 def test_copy_in_cloud_noclobber(self): |
265 bucket1_uri = self.CreateBucket() | 287 bucket1_uri = self.CreateBucket() |
266 bucket2_uri = self.CreateBucket() | 288 bucket2_uri = self.CreateBucket() |
267 key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo') | 289 key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo') |
268 stderr = self.RunGsUtil(['cp', suri(key_uri), suri(bucket2_uri)], | 290 stderr = self.RunGsUtil(['cp', suri(key_uri), suri(bucket2_uri)], |
269 return_stderr=True) | 291 return_stderr=True) |
270 # Rewrite API may output an additional 'Copying' progress notification. | 292 # Rewrite API may output an additional 'Copying' progress notification. |
271 self.assertGreaterEqual(stderr.count('Copying'), 1) | 293 self.assertGreaterEqual(stderr.count('Copying'), 1) |
272 self.assertLessEqual(stderr.count('Copying'), 2) | 294 self.assertLessEqual(stderr.count('Copying'), 2) |
273 stderr = self.RunGsUtil(['cp', '-n', suri(key_uri), suri(bucket2_uri)], | 295 stderr = self.RunGsUtil(['cp', '-n', suri(key_uri), suri(bucket2_uri)], |
274 return_stderr=True) | 296 return_stderr=True) |
275 self.assertIn('Skipping existing item: %s' % | 297 self.assertIn('Skipping existing item: %s' % |
276 suri(bucket2_uri, key_uri.object_name), stderr) | 298 suri(bucket2_uri, key_uri.object_name), stderr) |
277 | 299 |
278 @PerformsFileToObjectUpload | 300 @SequentialAndParallelTransfer |
279 def test_streaming(self): | 301 def test_streaming(self): |
280 bucket_uri = self.CreateBucket() | 302 bucket_uri = self.CreateBucket() |
281 stderr = self.RunGsUtil(['cp', '-', '%s' % suri(bucket_uri, 'foo')], | 303 stderr = self.RunGsUtil(['cp', '-', '%s' % suri(bucket_uri, 'foo')], |
282 stdin='bar', return_stderr=True) | 304 stdin='bar', return_stderr=True) |
283 self.assertIn('Copying from <STDIN>', stderr) | 305 self.assertIn('Copying from <STDIN>', stderr) |
284 key_uri = bucket_uri.clone_replace_name('foo') | 306 key_uri = bucket_uri.clone_replace_name('foo') |
285 self.assertEqual(key_uri.get_contents_as_string(), 'bar') | 307 self.assertEqual(key_uri.get_contents_as_string(), 'bar') |
286 | 308 |
287 def test_streaming_multiple_arguments(self): | 309 def test_streaming_multiple_arguments(self): |
288 bucket_uri = self.CreateBucket() | 310 bucket_uri = self.CreateBucket() |
289 stderr = self.RunGsUtil(['cp', '-', '-', suri(bucket_uri)], | 311 stderr = self.RunGsUtil(['cp', '-', '-', suri(bucket_uri)], |
290 stdin='bar', return_stderr=True, expected_status=1) | 312 stdin='bar', return_stderr=True, expected_status=1) |
291 self.assertIn('Multiple URL strings are not supported with streaming', | 313 self.assertIn('Multiple URL strings are not supported with streaming', |
292 stderr) | 314 stderr) |
293 | 315 |
294 # TODO: Implement a way to test both with and without using magic file. | 316 # TODO: Implement a way to test both with and without using magic file. |
295 | 317 |
296 @PerformsFileToObjectUpload | 318 @SequentialAndParallelTransfer |
297 def test_detect_content_type(self): | 319 def test_detect_content_type(self): |
298 """Tests local detection of content type.""" | 320 """Tests local detection of content type.""" |
299 bucket_uri = self.CreateBucket() | 321 bucket_uri = self.CreateBucket() |
300 dsturi = suri(bucket_uri, 'foo') | 322 dsturi = suri(bucket_uri, 'foo') |
301 | 323 |
302 self.RunGsUtil(['cp', self._get_test_file('test.mp3'), dsturi]) | 324 self.RunGsUtil(['cp', self._get_test_file('test.mp3'), dsturi]) |
303 | 325 |
304 # Use @Retry as hedge against bucket listing eventual consistency. | 326 # Use @Retry as hedge against bucket listing eventual consistency. |
305 @Retry(AssertionError, tries=3, timeout_secs=1) | 327 @Retry(AssertionError, tries=3, timeout_secs=1) |
306 def _Check1(): | 328 def _Check1(): |
(...skipping 61 matching lines...) |
368 self._get_test_file('test.gif'), dsturi]) | 390 self._get_test_file('test.gif'), dsturi]) |
369 | 391 |
370 # Use @Retry as hedge against bucket listing eventual consistency. | 392 # Use @Retry as hedge against bucket listing eventual consistency. |
371 @Retry(AssertionError, tries=3, timeout_secs=1) | 393 @Retry(AssertionError, tries=3, timeout_secs=1) |
372 def _Check2(): | 394 def _Check2(): |
373 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) | 395 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) |
374 self.assertRegexpMatches(stdout, r'Content-Type:\s+text/plain') | 396 self.assertRegexpMatches(stdout, r'Content-Type:\s+text/plain') |
375 _Check2() | 397 _Check2() |
376 | 398 |
377 @unittest.skipIf(IS_WINDOWS, 'magicfile is not available on Windows.') | 399 @unittest.skipIf(IS_WINDOWS, 'magicfile is not available on Windows.') |
378 @PerformsFileToObjectUpload | 400 @SequentialAndParallelTransfer |
379 def test_magicfile_override(self): | 401 def test_magicfile_override(self): |
380 """Tests content type override with magicfile value.""" | 402 """Tests content type override with magicfile value.""" |
381 bucket_uri = self.CreateBucket() | 403 bucket_uri = self.CreateBucket() |
382 dsturi = suri(bucket_uri, 'foo') | 404 dsturi = suri(bucket_uri, 'foo') |
383 fpath = self.CreateTempFile(contents='foo/bar\n') | 405 fpath = self.CreateTempFile(contents='foo/bar\n') |
384 self.RunGsUtil(['cp', fpath, dsturi]) | 406 self.RunGsUtil(['cp', fpath, dsturi]) |
385 | 407 |
386 # Use @Retry as hedge against bucket listing eventual consistency. | 408 # Use @Retry as hedge against bucket listing eventual consistency. |
387 @Retry(AssertionError, tries=3, timeout_secs=1) | 409 @Retry(AssertionError, tries=3, timeout_secs=1) |
388 def _Check1(): | 410 def _Check1(): |
389 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) | 411 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) |
390 use_magicfile = boto.config.getbool('GSUtil', 'use_magicfile', False) | 412 use_magicfile = boto.config.getbool('GSUtil', 'use_magicfile', False) |
391 content_type = ('text/plain' if use_magicfile | 413 content_type = ('text/plain' if use_magicfile |
392 else 'application/octet-stream') | 414 else 'application/octet-stream') |
393 self.assertRegexpMatches(stdout, r'Content-Type:\s+%s' % content_type) | 415 self.assertRegexpMatches(stdout, r'Content-Type:\s+%s' % content_type) |
394 _Check1() | 416 _Check1() |
395 | 417 |
396 @PerformsFileToObjectUpload | 418 @SequentialAndParallelTransfer |
397 def test_content_type_mismatches(self): | 419 def test_content_type_mismatches(self): |
398 """Tests overriding content type when it does not match the file type.""" | 420 """Tests overriding content type when it does not match the file type.""" |
399 bucket_uri = self.CreateBucket() | 421 bucket_uri = self.CreateBucket() |
400 dsturi = suri(bucket_uri, 'foo') | 422 dsturi = suri(bucket_uri, 'foo') |
401 fpath = self.CreateTempFile(contents='foo/bar\n') | 423 fpath = self.CreateTempFile(contents='foo/bar\n') |
402 | 424 |
403 self.RunGsUtil(['-h', 'Content-Type:image/gif', 'cp', | 425 self.RunGsUtil(['-h', 'Content-Type:image/gif', 'cp', |
404 self._get_test_file('test.mp3'), dsturi]) | 426 self._get_test_file('test.mp3'), dsturi]) |
405 | 427 |
406 # Use @Retry as hedge against bucket listing eventual consistency. | 428 # Use @Retry as hedge against bucket listing eventual consistency. |
(...skipping 15 matching lines...) |
422 | 444 |
423 self.RunGsUtil(['-h', 'Content-Type:image/gif', 'cp', fpath, dsturi]) | 445 self.RunGsUtil(['-h', 'Content-Type:image/gif', 'cp', fpath, dsturi]) |
424 | 446 |
425 # Use @Retry as hedge against bucket listing eventual consistency. | 447 # Use @Retry as hedge against bucket listing eventual consistency. |
426 @Retry(AssertionError, tries=3, timeout_secs=1) | 448 @Retry(AssertionError, tries=3, timeout_secs=1) |
427 def _Check3(): | 449 def _Check3(): |
428 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) | 450 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) |
429 self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif') | 451 self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif') |
430 _Check3() | 452 _Check3() |
431 | 453 |
432 @PerformsFileToObjectUpload | 454 @SequentialAndParallelTransfer |
433 def test_content_type_header_case_insensitive(self): | 455 def test_content_type_header_case_insensitive(self): |
434 """Tests that content type header is treated with case insensitivity.""" | 456 """Tests that content type header is treated with case insensitivity.""" |
435 bucket_uri = self.CreateBucket() | 457 bucket_uri = self.CreateBucket() |
436 dsturi = suri(bucket_uri, 'foo') | 458 dsturi = suri(bucket_uri, 'foo') |
437 fpath = self._get_test_file('test.gif') | 459 fpath = self._get_test_file('test.gif') |
438 | 460 |
439 self.RunGsUtil(['-h', 'content-Type:text/plain', 'cp', | 461 self.RunGsUtil(['-h', 'content-Type:text/plain', 'cp', |
440 fpath, dsturi]) | 462 fpath, dsturi]) |
441 | 463 |
442 # Use @Retry as hedge against bucket listing eventual consistency. | 464 # Use @Retry as hedge against bucket listing eventual consistency. |
443 @Retry(AssertionError, tries=3, timeout_secs=1) | 465 @Retry(AssertionError, tries=3, timeout_secs=1) |
444 def _Check1(): | 466 def _Check1(): |
445 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) | 467 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) |
446 self.assertRegexpMatches(stdout, r'Content-Type:\s+text/plain') | 468 self.assertRegexpMatches(stdout, r'Content-Type:\s+text/plain') |
447 self.assertNotRegexpMatches(stdout, r'image/gif') | 469 self.assertNotRegexpMatches(stdout, r'image/gif') |
448 _Check1() | 470 _Check1() |
449 | 471 |
450 self.RunGsUtil(['-h', 'CONTENT-TYPE:image/gif', | 472 self.RunGsUtil(['-h', 'CONTENT-TYPE:image/gif', |
451 '-h', 'content-type:image/gif', | 473 '-h', 'content-type:image/gif', |
452 'cp', fpath, dsturi]) | 474 'cp', fpath, dsturi]) |
453 | 475 |
454 # Use @Retry as hedge against bucket listing eventual consistency. | 476 # Use @Retry as hedge against bucket listing eventual consistency. |
455 @Retry(AssertionError, tries=3, timeout_secs=1) | 477 @Retry(AssertionError, tries=3, timeout_secs=1) |
456 def _Check2(): | 478 def _Check2(): |
457 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) | 479 stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True) |
458 self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif') | 480 self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif') |
459 self.assertNotRegexpMatches(stdout, r'image/gif,\s*image/gif') | 481 self.assertNotRegexpMatches(stdout, r'image/gif,\s*image/gif') |
460 _Check2() | 482 _Check2() |
461 | 483 |
462 @PerformsFileToObjectUpload | 484 @SequentialAndParallelTransfer |
463 def test_other_headers(self): | 485 def test_other_headers(self): |
464 """Tests that non-content-type headers are applied successfully on copy.""" | 486 """Tests that non-content-type headers are applied successfully on copy.""" |
465 bucket_uri = self.CreateBucket() | 487 bucket_uri = self.CreateBucket() |
466 dst_uri = suri(bucket_uri, 'foo') | 488 dst_uri = suri(bucket_uri, 'foo') |
467 fpath = self._get_test_file('test.gif') | 489 fpath = self._get_test_file('test.gif') |
468 | 490 |
469 self.RunGsUtil(['-h', 'Cache-Control:public,max-age=12', | 491 self.RunGsUtil(['-h', 'Cache-Control:public,max-age=12', |
470 '-h', 'x-%s-meta-1:abcd' % self.provider_custom_meta, 'cp', | 492 '-h', 'x-%s-meta-1:abcd' % self.provider_custom_meta, 'cp', |
471 fpath, dst_uri]) | 493 fpath, dst_uri]) |
472 | 494 |
473 stdout = self.RunGsUtil(['ls', '-L', dst_uri], return_stdout=True) | 495 stdout = self.RunGsUtil(['ls', '-L', dst_uri], return_stdout=True) |
474 self.assertRegexpMatches(stdout, r'Cache-Control\s*:\s*public,max-age=12') | 496 self.assertRegexpMatches(stdout, r'Cache-Control\s*:\s*public,max-age=12') |
475 self.assertRegexpMatches(stdout, r'Metadata:\s*1:\s*abcd') | 497 self.assertRegexpMatches(stdout, r'Metadata:\s*1:\s*abcd') |
476 | 498 |
477 dst_uri2 = suri(bucket_uri, 'bar') | 499 dst_uri2 = suri(bucket_uri, 'bar') |
478 self.RunGsUtil(['cp', dst_uri, dst_uri2]) | 500 self.RunGsUtil(['cp', dst_uri, dst_uri2]) |
479 # Ensure metadata was preserved across copy. | 501 # Ensure metadata was preserved across copy. |
480 stdout = self.RunGsUtil(['ls', '-L', dst_uri2], return_stdout=True) | 502 stdout = self.RunGsUtil(['ls', '-L', dst_uri2], return_stdout=True) |
481 self.assertRegexpMatches(stdout, r'Cache-Control\s*:\s*public,max-age=12') | 503 self.assertRegexpMatches(stdout, r'Cache-Control\s*:\s*public,max-age=12') |
482 self.assertRegexpMatches(stdout, r'Metadata:\s*1:\s*abcd') | 504 self.assertRegexpMatches(stdout, r'Metadata:\s*1:\s*abcd') |
483 | 505 |
484 @PerformsFileToObjectUpload | 506 @SequentialAndParallelTransfer |
485 def test_versioning(self): | 507 def test_versioning(self): |
486 """Tests copy with versioning.""" | 508 """Tests copy with versioning.""" |
487 bucket_uri = self.CreateVersionedBucket() | 509 bucket_uri = self.CreateVersionedBucket() |
488 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data2') | 510 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data2') |
489 k2_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') | 511 k2_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') |
490 g1 = urigen(k2_uri) | 512 g1 = urigen(k2_uri) |
491 self.RunGsUtil(['cp', suri(k1_uri), suri(k2_uri)]) | 513 self.RunGsUtil(['cp', suri(k1_uri), suri(k2_uri)]) |
492 k2_uri = bucket_uri.clone_replace_name(k2_uri.object_name) | 514 k2_uri = bucket_uri.clone_replace_name(k2_uri.object_name) |
493 k2_uri = bucket_uri.clone_replace_key(k2_uri.get_key()) | 515 k2_uri = bucket_uri.clone_replace_key(k2_uri.get_key()) |
494 g2 = urigen(k2_uri) | 516 g2 = urigen(k2_uri) |
(...skipping 22 matching lines...) |
517 k2_uri.versionless_uri]) | 539 k2_uri.versionless_uri]) |
518 self.RunGsUtil(['cp', k2_uri.versionless_uri, fpath]) | 540 self.RunGsUtil(['cp', k2_uri.versionless_uri, fpath]) |
519 with open(fpath, 'r') as f: | 541 with open(fpath, 'r') as f: |
520 self.assertEqual(f.read(), 'data1') | 542 self.assertEqual(f.read(), 'data1') |
521 | 543 |
522 # Attempt to specify a version-specific URI for destination. | 544 # Attempt to specify a version-specific URI for destination. |
523 stderr = self.RunGsUtil(['cp', fpath, k2_uri.uri], return_stderr=True, | 545 stderr = self.RunGsUtil(['cp', fpath, k2_uri.uri], return_stderr=True, |
524 expected_status=1) | 546 expected_status=1) |
525 self.assertIn('cannot be the destination for gsutil cp', stderr) | 547 self.assertIn('cannot be the destination for gsutil cp', stderr) |
526 | 548 |
| 549 def test_versioning_no_parallelism(self): |
| 550 """Tests that copy all-versions errors when parallelism is enabled.""" |
| 551 stderr = self.RunGsUtil( |
| 552 ['-m', 'cp', '-A', suri(self.nonexistent_bucket_name, 'foo'), |
| 553 suri(self.nonexistent_bucket_name, 'bar')], |
| 554 expected_status=1, return_stderr=True) |
| 555 self.assertIn('-m option is not supported with the cp -A flag', stderr) |
| 556 |
527 @SkipForS3('S3 lists versioned objects in reverse timestamp order.') | 557 @SkipForS3('S3 lists versioned objects in reverse timestamp order.') |
528 def test_recursive_copying_versioned_bucket(self): | 558 def test_recursive_copying_versioned_bucket(self): |
529 """Tests that cp -R with versioned buckets copies all versions in order.""" | 559 """Tests cp -R with versioned buckets.""" |
530 bucket1_uri = self.CreateVersionedBucket() | 560 bucket1_uri = self.CreateVersionedBucket() |
531 bucket2_uri = self.CreateVersionedBucket() | 561 bucket2_uri = self.CreateVersionedBucket() |
| 562 bucket3_uri = self.CreateVersionedBucket() |
532 | 563 |
533 # Write two versions of an object to the bucket1. | 564 # Write two versions of an object to the bucket1. |
534 self.CreateObject(bucket_uri=bucket1_uri, object_name='k', contents='data0') | 565 self.CreateObject(bucket_uri=bucket1_uri, object_name='k', contents='data0') |
535 self.CreateObject(bucket_uri=bucket1_uri, object_name='k', | 566 self.CreateObject(bucket_uri=bucket1_uri, object_name='k', |
536 contents='longer_data1') | 567 contents='longer_data1') |
537 | 568 |
538 self.AssertNObjectsInBucket(bucket1_uri, 2, versioned=True) | 569 self.AssertNObjectsInBucket(bucket1_uri, 2, versioned=True) |
539 self.AssertNObjectsInBucket(bucket2_uri, 0, versioned=True) | 570 self.AssertNObjectsInBucket(bucket2_uri, 0, versioned=True) |
| 571 self.AssertNObjectsInBucket(bucket3_uri, 0, versioned=True) |
540 | 572 |
541 # Recursively copy to second versioned bucket. | 573 # Recursively copy to second versioned bucket. |
542 self.RunGsUtil(['cp', '-R', suri(bucket1_uri, '*'), suri(bucket2_uri)]) | 574 # -A flag should copy all versions in order. |
| 575 self.RunGsUtil(['cp', '-R', '-A', suri(bucket1_uri, '*'), |
| 576 suri(bucket2_uri)]) |
543 | 577 |
544 # Use @Retry as hedge against bucket listing eventual consistency. | 578 # Use @Retry as hedge against bucket listing eventual consistency. |
545 @Retry(AssertionError, tries=3, timeout_secs=1) | 579 @Retry(AssertionError, tries=3, timeout_secs=1) |
546 def _Check2(): | 580 def _Check2(): |
547 """Validates the results of the cp -R.""" | 581 """Validates the results of the cp -R.""" |
548 listing1 = self.RunGsUtil(['ls', '-la', suri(bucket1_uri)], | 582 listing1 = self.RunGsUtil(['ls', '-la', suri(bucket1_uri)], |
549 return_stdout=True).split('\n') | 583 return_stdout=True).split('\n') |
550 listing2 = self.RunGsUtil(['ls', '-la', suri(bucket2_uri)], | 584 listing2 = self.RunGsUtil(['ls', '-la', suri(bucket2_uri)], |
551 return_stdout=True).split('\n') | 585 return_stdout=True).split('\n') |
552 # 2 lines of listing output, 1 summary line, 1 empty line from \n split. | 586 # 2 lines of listing output, 1 summary line, 1 empty line from \n split. |
(...skipping 10 matching lines...) |
563 | 597 |
564 # Similarly for second object in each bucket. | 598 # Similarly for second object in each bucket. |
565 size1, _, uri_str1, _ = listing1[1].split() | 599 size1, _, uri_str1, _ = listing1[1].split() |
566 self.assertEquals(size1, str(len('longer_data1'))) | 600 self.assertEquals(size1, str(len('longer_data1'))) |
567 self.assertEquals(storage_uri(uri_str1).object_name, 'k') | 601 self.assertEquals(storage_uri(uri_str1).object_name, 'k') |
568 size2, _, uri_str2, _ = listing2[1].split() | 602 size2, _, uri_str2, _ = listing2[1].split() |
569 self.assertEquals(size2, str(len('longer_data1'))) | 603 self.assertEquals(size2, str(len('longer_data1'))) |
570 self.assertEquals(storage_uri(uri_str2).object_name, 'k') | 604 self.assertEquals(storage_uri(uri_str2).object_name, 'k') |
571 _Check2() | 605 _Check2() |
572 | 606 |
573 @PerformsFileToObjectUpload | 607 # Recursively copy to third versioned bucket with no -A flag. |
| 608 # This should copy only the live object. |
| 609 self.RunGsUtil(['cp', '-R', suri(bucket1_uri, '*'), |
| 610 suri(bucket3_uri)]) |
| 611 |
| 612 # Use @Retry as hedge against bucket listing eventual consistency. |
| 613 @Retry(AssertionError, tries=3, timeout_secs=1) |
| 614 def _Check3(): |
| 615 """Validates the results of the cp -R.""" |
| 616 listing1 = self.RunGsUtil(['ls', '-la', suri(bucket1_uri)], |
| 617 return_stdout=True).split('\n') |
| 618 listing2 = self.RunGsUtil(['ls', '-la', suri(bucket3_uri)], |
| 619 return_stdout=True).split('\n') |
| 620 # 2 lines of listing output, 1 summary line, 1 empty line from \n split. |
| 621 self.assertEquals(len(listing1), 4) |
| 622 # 1 line of listing output, 1 summary line, 1 empty line from \n split. |
| 623 self.assertEquals(len(listing2), 3) |
| 624 |
| 625 # Live (second) object in bucket 1 should match the single live object. |
| 626 size1, _, uri_str1, _ = listing2[0].split() |
| 627 self.assertEquals(size1, str(len('longer_data1'))) |
| 628 self.assertEquals(storage_uri(uri_str1).object_name, 'k') |
| 629 _Check3() |
| 630 |
| 631 @SequentialAndParallelTransfer |
574 @SkipForS3('Preconditions not supported for S3.') | 632 @SkipForS3('Preconditions not supported for S3.') |
575 def test_cp_generation_zero_match(self): | 633 def test_cp_generation_zero_match(self): |
576 """Tests that cp handles an object-not-exists precondition header.""" | 634 """Tests that cp handles an object-not-exists precondition header.""" |
577 bucket_uri = self.CreateBucket() | 635 bucket_uri = self.CreateBucket() |
578 fpath1 = self.CreateTempFile(contents='data1') | 636 fpath1 = self.CreateTempFile(contents='data1') |
579 # Match 0 means only write the object if it doesn't already exist. | 637 # Match 0 means only write the object if it doesn't already exist. |
580 gen_match_header = 'x-goog-if-generation-match:0' | 638 gen_match_header = 'x-goog-if-generation-match:0' |
581 | 639 |
582 # First copy should succeed. | 640 # First copy should succeed. |
583 # TODO: This can fail (rarely) if the server returns a 5xx but actually | 641 # TODO: This can fail (rarely) if the server returns a 5xx but actually |
584 # commits the bytes. If we add restarts on small uploads, handle this | 642 # commits the bytes. If we add restarts on small uploads, handle this |
585 # case. | 643 # case. |
586 self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1, suri(bucket_uri)]) | 644 self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1, suri(bucket_uri)]) |
587 | 645 |
588 # Second copy should fail with a precondition error. | 646 # Second copy should fail with a precondition error. |
589 stderr = self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1, | 647 stderr = self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1, |
590 suri(bucket_uri)], | 648 suri(bucket_uri)], |
591 return_stderr=True, expected_status=1) | 649 return_stderr=True, expected_status=1) |
592 self.assertIn('PreconditionException', stderr) | 650 self.assertIn('PreconditionException', stderr) |
593 | 651 |
594 @PerformsFileToObjectUpload | 652 @SequentialAndParallelTransfer |
595 @SkipForS3('Preconditions not supported for S3.') | 653 @SkipForS3('Preconditions not supported for S3.') |
596 def test_cp_v_generation_match(self): | 654 def test_cp_v_generation_match(self): |
597 """Tests that cp -v option handles the if-generation-match header.""" | 655 """Tests that cp -v option handles the if-generation-match header.""" |
598 bucket_uri = self.CreateVersionedBucket() | 656 bucket_uri = self.CreateVersionedBucket() |
599 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') | 657 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') |
600 g1 = k1_uri.generation | 658 g1 = k1_uri.generation |
601 | 659 |
602 tmpdir = self.CreateTempDir() | 660 tmpdir = self.CreateTempDir() |
603 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data2') | 661 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data2') |
604 | 662 |
(...skipping 11 matching lines...) |
616 # Specifying a generation with -n should fail before the request hits the | 674 # Specifying a generation with -n should fail before the request hits the |
617 # server. | 675 # server. |
618 stderr = self.RunGsUtil(['-h', gen_match_header, 'cp', '-n', fpath1, | 676 stderr = self.RunGsUtil(['-h', gen_match_header, 'cp', '-n', fpath1, |
619 suri(k1_uri)], | 677 suri(k1_uri)], |
620 return_stderr=True, expected_status=1) | 678 return_stderr=True, expected_status=1) |
621 | 679 |
622 self.assertIn('ArgumentException', stderr) | 680 self.assertIn('ArgumentException', stderr) |
623 self.assertIn('Specifying x-goog-if-generation-match is not supported ' | 681 self.assertIn('Specifying x-goog-if-generation-match is not supported ' |
624 'with cp -n', stderr) | 682 'with cp -n', stderr) |
625 | 683 |
626 @PerformsFileToObjectUpload | 684 @SequentialAndParallelTransfer |
627 def test_cp_nv(self): | 685 def test_cp_nv(self): |
628 """Tests that cp -nv works when skipping existing file.""" | 686 """Tests that cp -nv works when skipping existing file.""" |
629 bucket_uri = self.CreateVersionedBucket() | 687 bucket_uri = self.CreateVersionedBucket() |
630 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') | 688 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') |
631 | 689 |
632 tmpdir = self.CreateTempDir() | 690 tmpdir = self.CreateTempDir() |
633 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data2') | 691 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data2') |
634 | 692 |
635 # First copy should succeed. | 693 # First copy should succeed. |
636 self.RunGsUtil(['cp', '-nv', fpath1, suri(k1_uri)]) | 694 self.RunGsUtil(['cp', '-nv', fpath1, suri(k1_uri)]) |
637 | 695 |
638 # Second copy should skip copying. | 696 # Second copy should skip copying. |
639 stderr = self.RunGsUtil(['cp', '-nv', fpath1, suri(k1_uri)], | 697 stderr = self.RunGsUtil(['cp', '-nv', fpath1, suri(k1_uri)], |
640 return_stderr=True) | 698 return_stderr=True) |
641 self.assertIn('Skipping existing item:', stderr) | 699 self.assertIn('Skipping existing item:', stderr) |
642 | 700 |
643 @PerformsFileToObjectUpload | 701 @SequentialAndParallelTransfer |
644 @SkipForS3('S3 lists versioned objects in reverse timestamp order.') | 702 @SkipForS3('S3 lists versioned objects in reverse timestamp order.') |
645 def test_cp_v_option(self): | 703 def test_cp_v_option(self): |
646 """Tests that cp -v returns the created object's version-specific URI.""" | 704 """Tests that cp -v returns the created object's version-specific URI.""" |
647 bucket_uri = self.CreateVersionedBucket() | 705 bucket_uri = self.CreateVersionedBucket() |
648 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') | 706 k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1') |
649 k2_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data2') | 707 k2_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data2') |
650 | 708 |
651 # Case 1: Upload file to object using one-shot PUT. | 709 # Case 1: Upload file to object using one-shot PUT. |
652 tmpdir = self.CreateTempDir() | 710 tmpdir = self.CreateTempDir() |
653 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data1') | 711 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data1') |
(...skipping 38 matching lines...) |
692 @Retry(AssertionError, tries=3, timeout_secs=1) | 750 @Retry(AssertionError, tries=3, timeout_secs=1) |
693 def _Check1(): | 751 def _Check1(): |
694 stdout = self.RunGsUtil(['ls', '-a', dst_str], return_stdout=True) | 752 stdout = self.RunGsUtil(['ls', '-a', dst_str], return_stdout=True) |
695 lines = stdout.split('\n') | 753 lines = stdout.split('\n') |
696 # Final (most recent) object should match the "Created:" URI. This is | 754 # Final (most recent) object should match the "Created:" URI. This is |
697 # in second-to-last line (last line is '\n'). | 755 # in second-to-last line (last line is '\n'). |
698 self.assertGreater(len(lines), 2) | 756 self.assertGreater(len(lines), 2) |
699 self.assertEqual(created_uri, lines[-2]) | 757 self.assertEqual(created_uri, lines[-2]) |
700 _Check1() | 758 _Check1() |
701 | 759 |
702 @PerformsFileToObjectUpload | 760 @SequentialAndParallelTransfer |
703 def test_stdin_args(self): | 761 def test_stdin_args(self): |
704 """Tests cp with the -I option.""" | 762 """Tests cp with the -I option.""" |
705 tmpdir = self.CreateTempDir() | 763 tmpdir = self.CreateTempDir() |
706 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data1') | 764 fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data1') |
707 fpath2 = self.CreateTempFile(tmpdir=tmpdir, contents='data2') | 765 fpath2 = self.CreateTempFile(tmpdir=tmpdir, contents='data2') |
708 bucket_uri = self.CreateBucket() | 766 bucket_uri = self.CreateBucket() |
709 self.RunGsUtil(['cp', '-I', suri(bucket_uri)], | 767 self.RunGsUtil(['cp', '-I', suri(bucket_uri)], |
710 stdin='\n'.join((fpath1, fpath2))) | 768 stdin='\n'.join((fpath1, fpath2))) |
711 | 769 |
712 # Use @Retry as hedge against bucket listing eventual consistency. | 770 # Use @Retry as hedge against bucket listing eventual consistency. |
(...skipping 124 matching lines...) |
837 public_read_acl = self.RunGsUtil(['acl', 'get', suri(key_uri)], | 895 public_read_acl = self.RunGsUtil(['acl', 'get', suri(key_uri)], |
838 return_stdout=True) | 896 return_stdout=True) |
839 | 897 |
840 @Retry(AssertionError, tries=3, timeout_secs=1) | 898 @Retry(AssertionError, tries=3, timeout_secs=1) |
841 def _Check(): | 899 def _Check(): |
842 uri = suri(bucket2_uri, key_uri.object_name) | 900 uri = suri(bucket2_uri, key_uri.object_name) |
843 new_acl_json = self.RunGsUtil(['acl', 'get', uri], return_stdout=True) | 901 new_acl_json = self.RunGsUtil(['acl', 'get', uri], return_stdout=True) |
844 self.assertEqual(public_read_acl, new_acl_json) | 902 self.assertEqual(public_read_acl, new_acl_json) |
845 _Check() | 903 _Check() |
846 | 904 |
847 @PerformsFileToObjectUpload | 905 @SequentialAndParallelTransfer |
848 def test_canned_acl_upload(self): | 906 def test_canned_acl_upload(self): |
849 """Tests uploading a file with a canned ACL.""" | 907 """Tests uploading a file with a canned ACL.""" |
850 bucket1_uri = self.CreateBucket() | 908 bucket1_uri = self.CreateBucket() |
851 key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo') | 909 key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo') |
852 # Set public-read on the object so we can compare the ACLs. | 910 # Set public-read on the object so we can compare the ACLs. |
853 self.RunGsUtil(['acl', 'set', 'public-read', suri(key_uri)]) | 911 self.RunGsUtil(['acl', 'set', 'public-read', suri(key_uri)]) |
854 public_read_acl = self.RunGsUtil(['acl', 'get', suri(key_uri)], | 912 public_read_acl = self.RunGsUtil(['acl', 'get', suri(key_uri)], |
855 return_stdout=True) | 913 return_stdout=True) |
856 | 914 |
857 file_name = 'bar' | 915 file_name = 'bar' |
(...skipping 24 matching lines...) |
882 key_uri = self.CreateObject(bucket_uri=bucket_uri, contents=contents) | 940 key_uri = self.CreateObject(bucket_uri=bucket_uri, contents=contents) |
883 stdout = self.RunGsUtil(['cp', suri(key_uri), '-'], return_stdout=True) | 941 stdout = self.RunGsUtil(['cp', suri(key_uri), '-'], return_stdout=True) |
884 self.assertIn(contents, stdout) | 942 self.assertIn(contents, stdout) |
885 | 943 |
886 def test_cp_local_file_to_local_stream(self): | 944 def test_cp_local_file_to_local_stream(self): |
887 contents = 'content' | 945 contents = 'content' |
888 fpath = self.CreateTempFile(contents=contents) | 946 fpath = self.CreateTempFile(contents=contents) |
889 stdout = self.RunGsUtil(['cp', fpath, '-'], return_stdout=True) | 947 stdout = self.RunGsUtil(['cp', fpath, '-'], return_stdout=True) |
890 self.assertIn(contents, stdout) | 948 self.assertIn(contents, stdout) |
891 | 949 |
892 @PerformsFileToObjectUpload | 950 @SequentialAndParallelTransfer |
893 def test_cp_zero_byte_file(self): | 951 def test_cp_zero_byte_file(self): |
894 dst_bucket_uri = self.CreateBucket() | 952 dst_bucket_uri = self.CreateBucket() |
895 src_dir = self.CreateTempDir() | 953 src_dir = self.CreateTempDir() |
896 fpath = os.path.join(src_dir, 'zero_byte') | 954 fpath = os.path.join(src_dir, 'zero_byte') |
897 with open(fpath, 'w') as unused_out_file: | 955 with open(fpath, 'w') as unused_out_file: |
898 pass # Write a zero byte file | 956 pass # Write a zero byte file |
899 self.RunGsUtil(['cp', fpath, suri(dst_bucket_uri)]) | 957 self.RunGsUtil(['cp', fpath, suri(dst_bucket_uri)]) |
900 | 958 |
901 @Retry(AssertionError, tries=3, timeout_secs=1) | 959 @Retry(AssertionError, tries=3, timeout_secs=1) |
902 def _Check1(): | 960 def _Check1(): |
903 stdout = self.RunGsUtil(['ls', suri(dst_bucket_uri)], return_stdout=True) | 961 stdout = self.RunGsUtil(['ls', suri(dst_bucket_uri)], return_stdout=True) |
904 self.assertIn(os.path.basename(fpath), stdout) | 962 self.assertIn(os.path.basename(fpath), stdout) |
905 _Check1() | 963 _Check1() |
906 | 964 |
907 download_path = os.path.join(src_dir, 'zero_byte_download') | 965 download_path = os.path.join(src_dir, 'zero_byte_download') |
908 self.RunGsUtil(['cp', suri(dst_bucket_uri, 'zero_byte'), download_path]) | 966 self.RunGsUtil(['cp', suri(dst_bucket_uri, 'zero_byte'), download_path]) |
909 self.assertTrue(os.stat(download_path)) | 967 self.assertTrue(os.stat(download_path)) |
910 | 968 |
911 def test_copy_bucket_to_bucket(self): | 969 def test_copy_bucket_to_bucket(self): |
912 """Tests that recursively copying from bucket to bucket. | 970 """Tests recursively copying from bucket to bucket. |
913 | 971 |
914 This should produce identically named objects (and not, in particular, | 972 This should produce identically named objects (and not, in particular, |
915 destination objects named by the version-specific URI from source objects). | 973 destination objects named by the version-specific URI from source objects). |
916 """ | 974 """ |
917 src_bucket_uri = self.CreateVersionedBucket() | 975 src_bucket_uri = self.CreateVersionedBucket() |
918 dst_bucket_uri = self.CreateVersionedBucket() | 976 dst_bucket_uri = self.CreateVersionedBucket() |
919 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj0', | 977 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj0', |
920 contents='abc') | 978 contents='abc') |
921 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj1', | 979 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj1', |
922 contents='def') | 980 contents='def') |
(...skipping 48 matching lines...) |
971 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj0', | 1029 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj0', |
972 contents='abc') | 1030 contents='abc') |
973 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj1', | 1031 self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj1', |
974 contents='def') | 1032 contents='def') |
975 | 1033 |
976 # Create a placeholder like what can be left over by web GUI tools. | 1034 # Create a placeholder like what can be left over by web GUI tools. |
977 key_uri = src_bucket_uri.clone_replace_name('/') | 1035 key_uri = src_bucket_uri.clone_replace_name('/') |
978 key_uri.set_contents_from_string('') | 1036 key_uri.set_contents_from_string('') |
979 self.AssertNObjectsInBucket(src_bucket_uri, 3) | 1037 self.AssertNObjectsInBucket(src_bucket_uri, 3) |
980 | 1038 |
981 stderr = self.RunGsUtil(['cp', '-R', suri(src_bucket_uri), dst_dir], | 1039 self.RunGsUtil(['cp', '-R', suri(src_bucket_uri), dst_dir]) |
982 return_stderr=True) | |
983 self.assertIn('Skipping cloud sub-directory placeholder object', stderr) | |
984 dir_list = [] | 1040 dir_list = [] |
985 for dirname, _, filenames in os.walk(dst_dir): | 1041 for dirname, _, filenames in os.walk(dst_dir): |
986 for filename in filenames: | 1042 for filename in filenames: |
987 dir_list.append(os.path.join(dirname, filename)) | 1043 dir_list.append(os.path.join(dirname, filename)) |
988 dir_list = sorted(dir_list) | 1044 dir_list = sorted(dir_list) |
989 self.assertEqual(len(dir_list), 2) | 1045 self.assertEqual(len(dir_list), 2) |
990 self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name, | 1046 self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name, |
991 'obj0'), dir_list[0]) | 1047 'obj0'), dir_list[0]) |
992 self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name, | 1048 self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name, |
993 'obj1'), dir_list[1]) | 1049 'obj1'), dir_list[1]) |
(...skipping 24 matching lines...) |
1018 @Retry(AssertionError, tries=3, timeout_secs=1) | 1074 @Retry(AssertionError, tries=3, timeout_secs=1) |
1019 def _Check1(): | 1075 def _Check1(): |
1020 stdout = self.RunGsUtil(['ls', '-L', suri(bucket_uri)], | 1076 stdout = self.RunGsUtil(['ls', '-L', suri(bucket_uri)], |
1021 return_stdout=True) | 1077 return_stdout=True) |
1022 self.assertRegexpMatches(stdout, | 1078 self.assertRegexpMatches(stdout, |
1023 r'Hash\s+\(md5\):\s+%s' % re.escape(file_md5)) | 1079 r'Hash\s+\(md5\):\s+%s' % re.escape(file_md5)) |
1024 _Check1() | 1080 _Check1() |
1025 | 1081 |
1026 @unittest.skipIf(IS_WINDOWS, | 1082 @unittest.skipIf(IS_WINDOWS, |
1027 'Unicode handling on Windows requires mods to site-packages') | 1083 'Unicode handling on Windows requires mods to site-packages') |
1028 @PerformsFileToObjectUpload | 1084 @SequentialAndParallelTransfer |
1029 def test_cp_manifest_upload_unicode(self): | 1085 def test_cp_manifest_upload_unicode(self): |
1030 return self._ManifestUpload('foo-unicöde', 'bar-unicöde', | 1086 return self._ManifestUpload('foo-unicöde', 'bar-unicöde', |
1031 'manifest-unicöde') | 1087 'manifest-unicöde') |
1032 | 1088 |
1033 @PerformsFileToObjectUpload | 1089 @SequentialAndParallelTransfer |
1034 def test_cp_manifest_upload(self): | 1090 def test_cp_manifest_upload(self): |
1035 """Tests uploading with a manifest file.""" | 1091 """Tests uploading with a manifest file.""" |
1036 return self._ManifestUpload('foo', 'bar', 'manifest') | 1092 return self._ManifestUpload('foo', 'bar', 'manifest') |
1037 | 1093 |
1038 def _ManifestUpload(self, file_name, object_name, manifest_name): | 1094 def _ManifestUpload(self, file_name, object_name, manifest_name): |
1039 """Tests uploading with a manifest file.""" | 1095 """Tests uploading with a manifest file.""" |
1040 bucket_uri = self.CreateBucket() | 1096 bucket_uri = self.CreateBucket() |
1041 dsturi = suri(bucket_uri, object_name) | 1097 dsturi = suri(bucket_uri, object_name) |
1042 | 1098 |
1043 fpath = self.CreateTempFile(file_name=file_name, contents='bar') | 1099 fpath = self.CreateTempFile(file_name=file_name, contents='bar') |
(...skipping 20 matching lines...) |
1064 if self.RunGsUtil == testcase.GsUtilIntegrationTestCase.RunGsUtil: | 1120 if self.RunGsUtil == testcase.GsUtilIntegrationTestCase.RunGsUtil: |
1065 # Check that we didn't do automatic parallel uploads - compose doesn't | 1121 # Check that we didn't do automatic parallel uploads - compose doesn't |
1066 # calculate the MD5 hash. Since RunGsUtil is overridden in | 1122 # calculate the MD5 hash. Since RunGsUtil is overridden in |
1067 # TestCpParallelUploads to force parallel uploads, we can check which | 1123 # TestCpParallelUploads to force parallel uploads, we can check which |
1068 # method was used. | 1124 # method was used. |
1069 self.assertEqual(results[4], 'rL0Y20zC+Fzt72VPzMSk2A==') # md5 | 1125 self.assertEqual(results[4], 'rL0Y20zC+Fzt72VPzMSk2A==') # md5 |
1070 self.assertEqual(int(results[6]), 3) # Source Size | 1126 self.assertEqual(int(results[6]), 3) # Source Size |
1071 self.assertEqual(int(results[7]), 3) # Bytes Transferred | 1127 self.assertEqual(int(results[7]), 3) # Bytes Transferred |
1072 self.assertEqual(results[8], 'OK') # Result | 1128 self.assertEqual(results[8], 'OK') # Result |
1073 | 1129 |
1074 @PerformsFileToObjectUpload | 1130 @SequentialAndParallelTransfer |
1075 def test_cp_manifest_download(self): | 1131 def test_cp_manifest_download(self): |
1076 """Tests downloading with a manifest file.""" | 1132 """Tests downloading with a manifest file.""" |
1077 key_uri = self.CreateObject(contents='foo') | 1133 key_uri = self.CreateObject(contents='foo') |
1078 fpath = self.CreateTempFile(contents='') | 1134 fpath = self.CreateTempFile(contents='') |
1079 logpath = self.CreateTempFile(contents='') | 1135 logpath = self.CreateTempFile(contents='') |
1080 # Ensure the file is empty. | 1136 # Ensure the file is empty. |
1081 open(logpath, 'w').close() | 1137 open(logpath, 'w').close() |
1082 self.RunGsUtil(['cp', '-L', logpath, suri(key_uri), fpath], | 1138 self.RunGsUtil(['cp', '-L', logpath, suri(key_uri), fpath], |
1083 return_stdout=True) | 1139 return_stdout=True) |
1084 with open(logpath, 'r') as f: | 1140 with open(logpath, 'r') as f: |
1085 lines = f.readlines() | 1141 lines = f.readlines() |
1086 self.assertEqual(len(lines), 2) | 1142 self.assertEqual(len(lines), 2) |
1087 | 1143 |
1088 expected_headers = ['Source', 'Destination', 'Start', 'End', 'Md5', | 1144 expected_headers = ['Source', 'Destination', 'Start', 'End', 'Md5', |
1089 'UploadId', 'Source Size', 'Bytes Transferred', | 1145 'UploadId', 'Source Size', 'Bytes Transferred', |
1090 'Result', 'Description'] | 1146 'Result', 'Description'] |
1091 self.assertEqual(expected_headers, lines[0].strip().split(',')) | 1147 self.assertEqual(expected_headers, lines[0].strip().split(',')) |
1092 results = lines[1].strip().split(',') | 1148 results = lines[1].strip().split(',') |
1093 self.assertEqual(results[0][:5], '%s://' % | 1149 self.assertEqual(results[0][:5], '%s://' % |
1094 self.default_provider) # source | 1150 self.default_provider) # source |
1095 self.assertEqual(results[1][:7], 'file://') # destination | 1151 self.assertEqual(results[1][:7], 'file://') # destination |
1096 date_format = '%Y-%m-%dT%H:%M:%S.%fZ' | 1152 date_format = '%Y-%m-%dT%H:%M:%S.%fZ' |
1097 start_date = datetime.datetime.strptime(results[2], date_format) | 1153 start_date = datetime.datetime.strptime(results[2], date_format) |
1098 end_date = datetime.datetime.strptime(results[3], date_format) | 1154 end_date = datetime.datetime.strptime(results[3], date_format) |
1099 self.assertEqual(end_date > start_date, True) | 1155 self.assertEqual(end_date > start_date, True) |
1100 self.assertEqual(results[4], 'rL0Y20zC+Fzt72VPzMSk2A==') # md5 | |
1101 self.assertEqual(int(results[6]), 3) # Source Size | 1156 self.assertEqual(int(results[6]), 3) # Source Size |
1102 # Bytes transferred might be more than 3 if the file was gzipped, since | 1157 # Bytes transferred might be more than 3 if the file was gzipped, since |
1103 # the minimum gzip header is 10 bytes. | 1158 # the minimum gzip header is 10 bytes. |
1104 self.assertGreaterEqual(int(results[7]), 3) # Bytes Transferred | 1159 self.assertGreaterEqual(int(results[7]), 3) # Bytes Transferred |
1105 self.assertEqual(results[8], 'OK') # Result | 1160 self.assertEqual(results[8], 'OK') # Result |
1106 | 1161 |
1107 @PerformsFileToObjectUpload | 1162 @SequentialAndParallelTransfer |
1108 def test_copy_unicode_non_ascii_filename(self): | 1163 def test_copy_unicode_non_ascii_filename(self): |
1109 key_uri = self.CreateObject(contents='foo') | 1164 key_uri = self.CreateObject(contents='foo') |
1110 # Make file large enough to cause a resumable upload (which hashes filename | 1165 # Make file large enough to cause a resumable upload (which hashes filename |
1111 # to construct tracker filename). | 1166 # to construct tracker filename). |
1112 fpath = self.CreateTempFile(file_name=u'Аудиоархив', | 1167 fpath = self.CreateTempFile(file_name=u'Аудиоархив', |
1113 contents='x' * 3 * 1024 * 1024) | 1168 contents='x' * 3 * 1024 * 1024) |
1114 fpath_bytes = fpath.encode(UTF8) | 1169 fpath_bytes = fpath.encode(UTF8) |
1115 stderr = self.RunGsUtil(['cp', fpath_bytes, suri(key_uri)], | 1170 stderr = self.RunGsUtil(['cp', fpath_bytes, suri(key_uri)], |
1116 return_stderr=True) | 1171 return_stderr=True) |
1117 self.assertIn('Copying file:', stderr) | 1172 self.assertIn('Copying file:', stderr) |
1118 | 1173 |
1119 # Note: We originally implemented a test | 1174 # Note: We originally implemented a test |
1120 # (test_copy_invalid_unicode_filename) that invalid unicode filenames were | 1175 # (test_copy_invalid_unicode_filename) that invalid unicode filenames were |
1121 # skipped, but it turns out os.walk() on MacOS doesn't have problems with | 1176 # skipped, but it turns out os.walk() on MacOS doesn't have problems with |
1122 # such files (so, failed that test). Given that, we decided to remove the | 1177 # such files (so, failed that test). Given that, we decided to remove the |
1123 # test. | 1178 # test. |
1124 | 1179 |
| 1180 @SequentialAndParallelTransfer |
1125 def test_gzip_upload_and_download(self): | 1181 def test_gzip_upload_and_download(self): |
1126 bucket_uri = self.CreateBucket() | 1182 bucket_uri = self.CreateBucket() |
1127 contents = 'x' * 10000 | 1183 contents = 'x' * 10000 |
1128 tmpdir = self.CreateTempDir() | 1184 tmpdir = self.CreateTempDir() |
1129 self.CreateTempFile(file_name='test.html', tmpdir=tmpdir, contents=contents) | 1185 self.CreateTempFile(file_name='test.html', tmpdir=tmpdir, contents=contents) |
1130 self.CreateTempFile(file_name='test.js', tmpdir=tmpdir, contents=contents) | 1186 self.CreateTempFile(file_name='test.js', tmpdir=tmpdir, contents=contents) |
1131 self.CreateTempFile(file_name='test.txt', tmpdir=tmpdir, contents=contents) | 1187 self.CreateTempFile(file_name='test.txt', tmpdir=tmpdir, contents=contents) |
1132 # Test that copying specifying only 2 of the 3 prefixes gzips the correct | 1188 # Test that copying specifying only 2 of the 3 prefixes gzips the correct |
1133 # files, and test that including whitespace in the extension list works. | 1189 # files, and test that including whitespace in the extension list works. |
1134 self.RunGsUtil(['cp', '-z', 'js, html', | 1190 self.RunGsUtil(['cp', '-z', 'js, html', |
(...skipping 16 matching lines...) |
1151 | 1207 |
1152 def test_upload_with_subdir_and_unexpanded_wildcard(self): | 1208 def test_upload_with_subdir_and_unexpanded_wildcard(self): |
1153 fpath1 = self.CreateTempFile(file_name=('tmp', 'x', 'y', 'z')) | 1209 fpath1 = self.CreateTempFile(file_name=('tmp', 'x', 'y', 'z')) |
1154 bucket_uri = self.CreateBucket() | 1210 bucket_uri = self.CreateBucket() |
1155 wildcard_uri = '%s*' % fpath1[:-5] | 1211 wildcard_uri = '%s*' % fpath1[:-5] |
1156 stderr = self.RunGsUtil(['cp', '-R', wildcard_uri, suri(bucket_uri)], | 1212 stderr = self.RunGsUtil(['cp', '-R', wildcard_uri, suri(bucket_uri)], |
1157 return_stderr=True) | 1213 return_stderr=True) |
1158 self.assertIn('Copying file:', stderr) | 1214 self.assertIn('Copying file:', stderr) |
1159 self.AssertNObjectsInBucket(bucket_uri, 1) | 1215 self.AssertNObjectsInBucket(bucket_uri, 1) |
1160 | 1216 |
| 1217 @SequentialAndParallelTransfer |
1161 def test_cp_object_ending_with_slash(self): | 1218 def test_cp_object_ending_with_slash(self): |
1162 """Tests that cp works with object names ending with slash.""" | 1219 """Tests that cp works with object names ending with slash.""" |
1163 tmpdir = self.CreateTempDir() | 1220 tmpdir = self.CreateTempDir() |
1164 bucket_uri = self.CreateBucket() | 1221 bucket_uri = self.CreateBucket() |
1165 self.CreateObject(bucket_uri=bucket_uri, | 1222 self.CreateObject(bucket_uri=bucket_uri, |
1166 object_name='abc/', | 1223 object_name='abc/', |
1167 contents='dir') | 1224 contents='dir') |
1168 self.CreateObject(bucket_uri=bucket_uri, | 1225 self.CreateObject(bucket_uri=bucket_uri, |
1169 object_name='abc/def', | 1226 object_name='abc/def', |
1170 contents='def') | 1227 contents='def') |
(...skipping 36 matching lines...) |
1207 | 1264 |
1208 def test_cp_multithreaded_wildcard(self): | 1265 def test_cp_multithreaded_wildcard(self): |
1209 """Tests that cp -m works with a wildcard.""" | 1266 """Tests that cp -m works with a wildcard.""" |
1210 num_test_files = 5 | 1267 num_test_files = 5 |
1211 tmp_dir = self.CreateTempDir(test_files=num_test_files) | 1268 tmp_dir = self.CreateTempDir(test_files=num_test_files) |
1212 bucket_uri = self.CreateBucket() | 1269 bucket_uri = self.CreateBucket() |
1213 wildcard_uri = '%s%s*' % (tmp_dir, os.sep) | 1270 wildcard_uri = '%s%s*' % (tmp_dir, os.sep) |
1214 self.RunGsUtil(['-m', 'cp', wildcard_uri, suri(bucket_uri)]) | 1271 self.RunGsUtil(['-m', 'cp', wildcard_uri, suri(bucket_uri)]) |
1215 self.AssertNObjectsInBucket(bucket_uri, num_test_files) | 1272 self.AssertNObjectsInBucket(bucket_uri, num_test_files) |
1216 | 1273 |
| 1274 @SequentialAndParallelTransfer |
1217 def test_cp_duplicate_source_args(self): | 1275 def test_cp_duplicate_source_args(self): |
1218 """Tests that cp -m works when a source argument is provided twice.""" | 1276 """Tests that cp -m works when a source argument is provided twice.""" |
1219 object_contents = 'edge' | 1277 object_contents = 'edge' |
1220 object_uri = self.CreateObject(object_name='foo', contents=object_contents) | 1278 object_uri = self.CreateObject(object_name='foo', contents=object_contents) |
1221 tmp_dir = self.CreateTempDir() | 1279 tmp_dir = self.CreateTempDir() |
1222 self.RunGsUtil(['-m', 'cp', suri(object_uri), suri(object_uri), tmp_dir]) | 1280 self.RunGsUtil(['-m', 'cp', suri(object_uri), suri(object_uri), tmp_dir]) |
1223 with open(os.path.join(tmp_dir, 'foo'), 'r') as in_fp: | 1281 with open(os.path.join(tmp_dir, 'foo'), 'r') as in_fp: |
1224 contents = in_fp.read() | 1282 contents = in_fp.read() |
1225 # Contents should not be duplicated. | 1283 # Contents should not be duplicated. |
1226 self.assertEqual(contents, object_contents) | 1284 self.assertEqual(contents, object_contents) |
(...skipping 182 matching lines...) |
1409 contents='a' * ONE_KIB) | 1467 contents='a' * ONE_KIB) |
1410 stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)], | 1468 stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)], |
1411 expected_status=1, return_stderr=True) | 1469 expected_status=1, return_stderr=True) |
1412 self.assertIn('ResumableUploadAbortException', stderr) | 1470 self.assertIn('ResumableUploadAbortException', stderr) |
1413 | 1471 |
1414 # This temporarily changes the tracker directory to unwritable which | 1472 # This temporarily changes the tracker directory to unwritable which |
1415 # interferes with any parallel running tests that use the tracker directory. | 1473 # interferes with any parallel running tests that use the tracker directory. |
1416 @NotParallelizable | 1474 @NotParallelizable |
1417 @SkipForS3('No resumable upload support for S3.') | 1475 @SkipForS3('No resumable upload support for S3.') |
1418 @unittest.skipIf(IS_WINDOWS, 'chmod on dir unsupported on Windows.') | 1476 @unittest.skipIf(IS_WINDOWS, 'chmod on dir unsupported on Windows.') |
1419 @PerformsFileToObjectUpload | 1477 @SequentialAndParallelTransfer |
1420 def test_cp_unwritable_tracker_file(self): | 1478 def test_cp_unwritable_tracker_file(self): |
1421 """Tests a resumable upload with an unwritable tracker file.""" | 1479 """Tests a resumable upload with an unwritable tracker file.""" |
1422 bucket_uri = self.CreateBucket() | 1480 bucket_uri = self.CreateBucket() |
1423 tracker_filename = GetTrackerFilePath( | 1481 tracker_filename = GetTrackerFilePath( |
1424 StorageUrlFromString(suri(bucket_uri, 'foo')), | 1482 StorageUrlFromString(suri(bucket_uri, 'foo')), |
1425 TrackerFileType.UPLOAD, self.test_api) | 1483 TrackerFileType.UPLOAD, self.test_api) |
1426 tracker_dir = os.path.dirname(tracker_filename) | 1484 tracker_dir = os.path.dirname(tracker_filename) |
1427 fpath = self.CreateTempFile(file_name='foo', contents='a' * ONE_KIB) | 1485 fpath = self.CreateTempFile(file_name='foo', contents='a' * ONE_KIB) |
1428 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 1486 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1429 save_mod = os.stat(tracker_dir).st_mode | 1487 save_mod = os.stat(tracker_dir).st_mode |
1430 | 1488 |
1431 try: | 1489 try: |
1432 os.chmod(tracker_dir, 0) | 1490 os.chmod(tracker_dir, 0) |
1433 with SetBotoConfigForTest([boto_config_for_test]): | 1491 with SetBotoConfigForTest([boto_config_for_test]): |
1434 stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)], | 1492 stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)], |
1435 expected_status=1, return_stderr=True) | 1493 expected_status=1, return_stderr=True) |
1436 self.assertIn('Couldn\'t write tracker file', stderr) | 1494 self.assertIn('Couldn\'t write tracker file', stderr) |
1437 finally: | 1495 finally: |
1438 os.chmod(tracker_dir, save_mod) | 1496 os.chmod(tracker_dir, save_mod) |
1439 if os.path.exists(tracker_filename): | 1497 if os.path.exists(tracker_filename): |
1440 os.unlink(tracker_filename) | 1498 os.unlink(tracker_filename) |
1441 | 1499 |
1442 # This temporarily changes the tracker directory to unwritable which | 1500 # This temporarily changes the tracker directory to unwritable which |
1443 # interferes with any parallel running tests that use the tracker directory. | 1501 # interferes with any parallel running tests that use the tracker directory. |
1444 @NotParallelizable | 1502 @NotParallelizable |
1445 @unittest.skipIf(IS_WINDOWS, 'chmod on dir unsupported on Windows.') | 1503 @unittest.skipIf(IS_WINDOWS, 'chmod on dir unsupported on Windows.') |
| 1504 @SequentialAndParallelTransfer |
1446 def test_cp_unwritable_tracker_file_download(self): | 1505 def test_cp_unwritable_tracker_file_download(self): |
1447 """Tests downloads with an unwritable tracker file.""" | 1506 """Tests downloads with an unwritable tracker file.""" |
1448 object_uri = self.CreateObject(contents='foo' * ONE_KIB) | 1507 object_uri = self.CreateObject(contents='foo' * ONE_KIB) |
1449 tracker_filename = GetTrackerFilePath( | 1508 tracker_filename = GetTrackerFilePath( |
1450 StorageUrlFromString(suri(object_uri)), | 1509 StorageUrlFromString(suri(object_uri)), |
1451 TrackerFileType.DOWNLOAD, self.test_api) | 1510 TrackerFileType.DOWNLOAD, self.test_api) |
1452 tracker_dir = os.path.dirname(tracker_filename) | 1511 tracker_dir = os.path.dirname(tracker_filename) |
1453 fpath = self.CreateTempFile() | 1512 fpath = self.CreateTempFile() |
1454 save_mod = os.stat(tracker_dir).st_mode | 1513 save_mod = os.stat(tracker_dir).st_mode |
1455 | 1514 |
(...skipping 28 matching lines...) |
1484 suri(object_uri), fpath], | 1543 suri(object_uri), fpath], |
1485 expected_status=1, return_stderr=True) | 1544 expected_status=1, return_stderr=True) |
1486 self.assertIn('Artifically halting download.', stderr) | 1545 self.assertIn('Artifically halting download.', stderr) |
1487 tracker_filename = GetTrackerFilePath( | 1546 tracker_filename = GetTrackerFilePath( |
1488 StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api) | 1547 StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api) |
1489 self.assertTrue(os.path.isfile(tracker_filename)) | 1548 self.assertTrue(os.path.isfile(tracker_filename)) |
1490 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], | 1549 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
1491 return_stderr=True) | 1550 return_stderr=True) |
1492 self.assertIn('Resuming download', stderr) | 1551 self.assertIn('Resuming download', stderr) |
1493 | 1552 |
| 1553 @SequentialAndParallelTransfer |
1494 def test_cp_resumable_download_etag_differs(self): | 1554 def test_cp_resumable_download_etag_differs(self): |
1495 """Tests that download restarts the file when the source object changes. | 1555 """Tests that download restarts the file when the source object changes. |
1496 | 1556 |
1497 This causes the etag not to match. | 1557 This causes the etag not to match. |
1498 """ | 1558 """ |
1499 bucket_uri = self.CreateBucket() | 1559 bucket_uri = self.CreateBucket() |
1500 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', | 1560 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
1501 contents='a' * self.halt_size) | 1561 contents='abc' * self.halt_size) |
1502 fpath = self.CreateTempFile() | 1562 fpath = self.CreateTempFile() |
1503 test_callback_file = self.CreateTempFile( | 1563 test_callback_file = self.CreateTempFile( |
1504 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) | 1564 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
1505 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 1565 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1506 with SetBotoConfigForTest([boto_config_for_test]): | 1566 with SetBotoConfigForTest([boto_config_for_test]): |
1507 # This will create a tracker file with an ETag. | 1567 # This will create a tracker file with an ETag. |
1508 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, | 1568 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
1509 suri(object_uri), fpath], | 1569 suri(object_uri), fpath], |
1510 expected_status=1, return_stderr=True) | 1570 expected_status=1, return_stderr=True) |
1511 self.assertIn('Artifically halting download.', stderr) | 1571 self.assertIn('Artifically halting download.', stderr) |
1512 # Create a new object with different contents - it should have a | 1572 # Create a new object with different contents - it should have a |
1513 # different ETag since the content has changed. | 1573 # different ETag since the content has changed. |
1514 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', | 1574 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
1515 contents='b' * self.halt_size) | 1575 contents='b' * self.halt_size) |
1516 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], | 1576 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
1517 return_stderr=True) | 1577 return_stderr=True) |
1518 self.assertNotIn('Resuming download', stderr) | 1578 self.assertNotIn('Resuming download', stderr) |
1519 | 1579 |
| 1580 # TODO: Enable this test for sequential downloads when their tracker files are |
| 1581 # modified to contain the source object generation. |
| 1582 @unittest.skipUnless(UsingCrcmodExtension(crcmod), |
| 1583 'Sliced download requires fast crcmod.') |
| 1584 @SkipForS3('No sliced download support for S3.') |
| 1585 def test_cp_resumable_download_generation_differs(self): |
| 1586 """Tests that a resumable download restarts if the generation differs.""" |
| 1587 bucket_uri = self.CreateBucket() |
| 1588 file_contents = 'abcd' * self.halt_size |
| 1589 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 1590 contents=file_contents) |
| 1591 fpath = self.CreateTempFile() |
| 1592 |
| 1593 test_callback_file = self.CreateTempFile( |
| 1594 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
| 1595 |
| 1596 boto_config_for_test = [ |
| 1597 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 1598 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 1599 ('GSUtil', 'sliced_object_download_max_components', '3')] |
| 1600 |
| 1601 with SetBotoConfigForTest(boto_config_for_test): |
| 1602 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 1603 suri(object_uri), suri(fpath)], |
| 1604 return_stderr=True, expected_status=1) |
| 1605 self.assertIn('Artifically halting download.', stderr) |
| 1606 |
| 1607 # Overwrite the object with an identical object, increasing |
| 1608 # the generation but leaving other metadata the same. |
| 1609 identical_file = self.CreateTempFile(contents=file_contents) |
| 1610 self.RunGsUtil(['cp', suri(identical_file), suri(object_uri)]) |
| 1611 |
| 1612 stderr = self.RunGsUtil(['cp', suri(object_uri), suri(fpath)], |
| 1613 return_stderr=True) |
| 1614 self.assertIn('Restarting download from scratch', stderr) |
| 1615 with open(fpath, 'r') as f: |
| 1616 self.assertEqual(f.read(), file_contents, 'File contents differ') |
| 1617 |
1520 def test_cp_resumable_download_file_larger(self): | 1618 def test_cp_resumable_download_file_larger(self): |
1521 """Tests download deletes the tracker file when existing file is larger.""" | 1619 """Tests download deletes the tracker file when existing file is larger.""" |
1522 bucket_uri = self.CreateBucket() | 1620 bucket_uri = self.CreateBucket() |
1523 fpath = self.CreateTempFile() | 1621 fpath = self.CreateTempFile() |
1524 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', | 1622 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
1525 contents='a' * self.halt_size) | 1623 contents='a' * self.halt_size) |
1526 test_callback_file = self.CreateTempFile( | 1624 test_callback_file = self.CreateTempFile( |
1527 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) | 1625 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
1528 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 1626 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1529 with SetBotoConfigForTest([boto_config_for_test]): | 1627 with SetBotoConfigForTest([boto_config_for_test]): |
1530 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, | 1628 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
1531 suri(object_uri), fpath], | 1629 suri(object_uri), fpath], |
1532 expected_status=1, return_stderr=True) | 1630 expected_status=1, return_stderr=True) |
1533 self.assertIn('Artifically halting download.', stderr) | 1631 self.assertIn('Artifically halting download.', stderr) |
1534 with open(fpath, 'w') as larger_file: | 1632 with open(fpath + '_.gstmp', 'w') as larger_file: |
1535 for _ in range(self.halt_size * 2): | 1633 for _ in range(self.halt_size * 2): |
1536 larger_file.write('a') | 1634 larger_file.write('a') |
1537 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], | 1635 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
1538 expected_status=1, return_stderr=True) | 1636 expected_status=1, return_stderr=True) |
1539 self.assertNotIn('Resuming download', stderr) | 1637 self.assertNotIn('Resuming download', stderr) |
1540 self.assertIn('is larger', stderr) | |
1541 self.assertIn('Deleting tracker file', stderr) | 1638 self.assertIn('Deleting tracker file', stderr) |
1542 | 1639 |
1543 def test_cp_resumable_download_content_differs(self): | 1640 def test_cp_resumable_download_content_differs(self): |
1544 """Tests that we do not re-download when tracker file matches existing file. | 1641 """Tests that we do not re-download when tracker file matches existing file. |
1545 | 1642 |
1546 We only compare size, not contents, so re-download should not occur even | 1643 We only compare size, not contents, so re-download should not occur even |
1547 though the contents are technically different. However, hash validation on | 1644 though the contents are technically different. However, hash validation on |
1548 the file should still occur, and we will then delete the file because | 1645 the file should still occur, and we will then delete the file because |
1549 the hashes differ. | 1646 the hashes differ. |
1550 """ | 1647 """ |
1551 bucket_uri = self.CreateBucket() | 1648 bucket_uri = self.CreateBucket() |
1552 tmp_dir = self.CreateTempDir() | 1649 tmp_dir = self.CreateTempDir() |
1553 fpath = self.CreateTempFile(tmpdir=tmp_dir, contents='abcd' * ONE_KIB) | 1650 fpath = self.CreateTempFile(tmpdir=tmp_dir) |
| 1651 temp_download_file = fpath + '_.gstmp' |
| 1652 with open(temp_download_file, 'w') as fp: |
| 1653 fp.write('abcd' * ONE_KIB) |
| 1654 |
1554 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', | 1655 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
1555 contents='efgh' * ONE_KIB) | 1656 contents='efgh' * ONE_KIB) |
1556 stdout = self.RunGsUtil(['ls', '-L', suri(object_uri)], return_stdout=True) | 1657 stdout = self.RunGsUtil(['ls', '-L', suri(object_uri)], return_stdout=True) |
1557 etag_match = re.search(r'\s*ETag:\s*(.*)', stdout) | 1658 etag_match = re.search(r'\s*ETag:\s*(.*)', stdout) |
1558 self.assertIsNotNone(etag_match, 'Could not get object ETag') | 1659 self.assertIsNotNone(etag_match, 'Could not get object ETag') |
1559 self.assertEqual(len(etag_match.groups()), 1, | 1660 self.assertEqual(len(etag_match.groups()), 1, |
1560 'Did not match expected single ETag') | 1661 'Did not match expected single ETag') |
1561 etag = etag_match.group(1) | 1662 etag = etag_match.group(1) |
1562 | 1663 |
1563 tracker_filename = GetTrackerFilePath( | 1664 tracker_filename = GetTrackerFilePath( |
1564 StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api) | 1665 StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api) |
1565 try: | 1666 try: |
1566 with open(tracker_filename, 'w') as tracker_fp: | 1667 with open(tracker_filename, 'w') as tracker_fp: |
1567 tracker_fp.write(etag) | 1668 tracker_fp.write(etag) |
1568 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 1669 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1569 with SetBotoConfigForTest([boto_config_for_test]): | 1670 with SetBotoConfigForTest([boto_config_for_test]): |
1570 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], | 1671 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
1571 return_stderr=True, expected_status=1) | 1672 return_stderr=True, expected_status=1) |
1572 self.assertIn('Download already complete for file', stderr) | 1673 self.assertIn('Download already complete', stderr) |
1573 self.assertIn('doesn\'t match cloud-supplied digest', stderr) | 1674 self.assertIn('doesn\'t match cloud-supplied digest', stderr) |
1574 # File and tracker file should be deleted. | 1675 # File and tracker file should be deleted. |
| 1676 self.assertFalse(os.path.isfile(temp_download_file)) |
| 1677 self.assertFalse(os.path.isfile(tracker_filename)) |
| 1678 # Permanent file should not have been created. |
1575 self.assertFalse(os.path.isfile(fpath)) | 1679 self.assertFalse(os.path.isfile(fpath)) |
1576 self.assertFalse(os.path.isfile(tracker_filename)) | |
1577 finally: | 1680 finally: |
1578 if os.path.exists(tracker_filename): | 1681 if os.path.exists(tracker_filename): |
1579 os.unlink(tracker_filename) | 1682 os.unlink(tracker_filename) |
1580 | 1683 |
1581 def test_cp_resumable_download_content_matches(self): | 1684 def test_cp_resumable_download_content_matches(self): |
1582 """Tests download no-ops when tracker file matches existing file.""" | 1685 """Tests download no-ops when tracker file matches existing file.""" |
1583 bucket_uri = self.CreateBucket() | 1686 bucket_uri = self.CreateBucket() |
1584 tmp_dir = self.CreateTempDir() | 1687 tmp_dir = self.CreateTempDir() |
| 1688 fpath = self.CreateTempFile(tmpdir=tmp_dir) |
1585 matching_contents = 'abcd' * ONE_KIB | 1689 matching_contents = 'abcd' * ONE_KIB |
1586 fpath = self.CreateTempFile(tmpdir=tmp_dir, contents=matching_contents) | 1690 temp_download_file = fpath + '_.gstmp' |
| 1691 with open(temp_download_file, 'w') as fp: |
| 1692 fp.write(matching_contents) |
| 1693 |
1587 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', | 1694 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
1588 contents=matching_contents) | 1695 contents=matching_contents) |
1589 stdout = self.RunGsUtil(['ls', '-L', suri(object_uri)], return_stdout=True) | 1696 stdout = self.RunGsUtil(['ls', '-L', suri(object_uri)], return_stdout=True) |
1590 etag_match = re.search(r'\s*ETag:\s*(.*)', stdout) | 1697 etag_match = re.search(r'\s*ETag:\s*(.*)', stdout) |
1591 self.assertIsNotNone(etag_match, 'Could not get object ETag') | 1698 self.assertIsNotNone(etag_match, 'Could not get object ETag') |
1592 self.assertEqual(len(etag_match.groups()), 1, | 1699 self.assertEqual(len(etag_match.groups()), 1, |
1593 'Did not match expected single ETag') | 1700 'Did not match expected single ETag') |
1594 etag = etag_match.group(1) | 1701 etag = etag_match.group(1) |
1595 tracker_filename = GetTrackerFilePath( | 1702 tracker_filename = GetTrackerFilePath( |
1596 StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api) | 1703 StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api) |
1597 with open(tracker_filename, 'w') as tracker_fp: | 1704 with open(tracker_filename, 'w') as tracker_fp: |
1598 tracker_fp.write(etag) | 1705 tracker_fp.write(etag) |
1599 try: | 1706 try: |
1600 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 1707 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1601 with SetBotoConfigForTest([boto_config_for_test]): | 1708 with SetBotoConfigForTest([boto_config_for_test]): |
1602 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], | 1709 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
1603 return_stderr=True) | 1710 return_stderr=True) |
1604 self.assertIn('Download already complete for file', stderr) | 1711 self.assertIn('Download already complete', stderr) |
1605 # Tracker file should be removed after successful hash validation. | 1712 # Tracker file should be removed after successful hash validation. |
1606 self.assertFalse(os.path.isfile(tracker_filename)) | 1713 self.assertFalse(os.path.isfile(tracker_filename)) |
1607 finally: | 1714 finally: |
1608 if os.path.exists(tracker_filename): | 1715 if os.path.exists(tracker_filename): |
1609 os.unlink(tracker_filename) | 1716 os.unlink(tracker_filename) |
1610 | 1717 |
1611 def test_cp_resumable_download_tracker_file_not_matches(self): | 1718 def test_cp_resumable_download_tracker_file_not_matches(self): |
1612 """Tests that download overwrites when tracker file etag does not match.""" | 1719 """Tests that download overwrites when tracker file etag does not match.""" |
1613 bucket_uri = self.CreateBucket() | 1720 bucket_uri = self.CreateBucket() |
1614 tmp_dir = self.CreateTempDir() | 1721 tmp_dir = self.CreateTempDir() |
(...skipping 21 matching lines...) |
1636 with open(fpath, 'r') as in_fp: | 1743 with open(fpath, 'r') as in_fp: |
1637 contents = in_fp.read() | 1744 contents = in_fp.read() |
1638 self.assertEqual(contents, 'efgh' * ONE_KIB, | 1745 self.assertEqual(contents, 'efgh' * ONE_KIB, |
1639 'File not overwritten when it should have been ' | 1746 'File not overwritten when it should have been ' |
1640 'due to a non-matching tracker file.') | 1747 'due to a non-matching tracker file.') |
1641 self.assertFalse(os.path.isfile(tracker_filename)) | 1748 self.assertFalse(os.path.isfile(tracker_filename)) |
1642 finally: | 1749 finally: |
1643 if os.path.exists(tracker_filename): | 1750 if os.path.exists(tracker_filename): |
1644 os.unlink(tracker_filename) | 1751 os.unlink(tracker_filename) |
1645 | 1752 |
| 1753 @SequentialAndParallelTransfer |
1646 def test_cp_resumable_download_gzip(self): | 1754 def test_cp_resumable_download_gzip(self): |
1647 """Tests that download can be resumed successfully with a gzipped file.""" | 1755 """Tests that download can be resumed successfully with a gzipped file.""" |
1648 # Generate some reasonably incompressible data. This compresses to | 1756 # Generate some reasonably incompressible data. This compresses to |
1649 # around 128K in practice, but we assert specifically below that it is | 1757 # around 128K in practice, but we assert specifically below that it is |
1650 # larger than self.halt_size to guarantee that we can halt the download | 1758 # larger than self.halt_size to guarantee that we can halt the download |
1651 # partway through. | 1759 # partway through. |
1652 object_uri = self.CreateObject() | 1760 object_uri = self.CreateObject() |
1653 random.seed(0) | 1761 random.seed(0) |
1654 contents = str([random.choice(string.ascii_letters) | 1762 contents = str([random.choice(string.ascii_letters) |
1655 for _ in xrange(ONE_KIB * 128)]) | 1763 for _ in xrange(ONE_KIB * 128)]) |
(...skipping 20 matching lines...) |
1676 fpath2 = self.CreateTempFile() | 1784 fpath2 = self.CreateTempFile() |
1677 test_callback_file = self.CreateTempFile( | 1785 test_callback_file = self.CreateTempFile( |
1678 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) | 1786 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
1679 | 1787 |
1680 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 1788 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1681 with SetBotoConfigForTest([boto_config_for_test]): | 1789 with SetBotoConfigForTest([boto_config_for_test]): |
1682 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, | 1790 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
1683 suri(object_uri), suri(fpath2)], | 1791 suri(object_uri), suri(fpath2)], |
1684 return_stderr=True, expected_status=1) | 1792 return_stderr=True, expected_status=1) |
1685 self.assertIn('Artifically halting download.', stderr) | 1793 self.assertIn('Artifically halting download.', stderr) |
| 1794 self.assertIn('Downloading to temp gzip filename', stderr) |
| 1795 |
| 1796 # Tracker files will have different names depending on whether we are |
| 1797 # downloading sequentially or in parallel. |
| 1798 sliced_download_threshold = HumanReadableToBytes( |
| 1799 boto.config.get('GSUtil', 'sliced_object_download_threshold', |
| 1800 DEFAULT_SLICED_OBJECT_DOWNLOAD_THRESHOLD)) |
| 1801 sliced_download = (len(contents) > sliced_download_threshold |
| 1802 and sliced_download_threshold > 0 |
| 1803 and UsingCrcmodExtension(crcmod)) |
| 1804 if sliced_download: |
| 1805 trackerfile_type = TrackerFileType.SLICED_DOWNLOAD |
| 1806 else: |
| 1807 trackerfile_type = TrackerFileType.DOWNLOAD |
1686 tracker_filename = GetTrackerFilePath( | 1808 tracker_filename = GetTrackerFilePath( |
1687 StorageUrlFromString(fpath2), TrackerFileType.DOWNLOAD, self.test_api) | 1809 StorageUrlFromString(fpath2), trackerfile_type, self.test_api) |
1688 self.assertTrue(os.path.isfile(tracker_filename)) | 1810 |
1689 self.assertIn('Downloading to temp gzip filename', stderr) | |
1690 # We should have a temporary gzipped file, a tracker file, and no | 1811 # We should have a temporary gzipped file, a tracker file, and no |
1691 # final file yet. | 1812 # final file yet. |
| 1813 self.assertTrue(os.path.isfile(tracker_filename)) |
1692 self.assertTrue(os.path.isfile('%s_.gztmp' % fpath2)) | 1814 self.assertTrue(os.path.isfile('%s_.gztmp' % fpath2)) |
1693 stderr = self.RunGsUtil(['cp', suri(object_uri), suri(fpath2)], | 1815 stderr = self.RunGsUtil(['cp', suri(object_uri), suri(fpath2)], |
1694 return_stderr=True) | 1816 return_stderr=True) |
1695 self.assertIn('Resuming download', stderr) | 1817 self.assertIn('Resuming download', stderr) |
1696 with open(fpath2, 'r') as f: | 1818 with open(fpath2, 'r') as f: |
1697 self.assertEqual(f.read(), contents, 'File contents did not match.') | 1819 self.assertEqual(f.read(), contents, 'File contents did not match.') |
1698 self.assertFalse(os.path.isfile(tracker_filename)) | 1820 self.assertFalse(os.path.isfile(tracker_filename)) |
1699 self.assertFalse(os.path.isfile('%s_.gztmp' % fpath2)) | 1821 self.assertFalse(os.path.isfile('%s_.gztmp' % fpath2)) |
1700 | 1822 |
| 1823 @SequentialAndParallelTransfer |
| 1824 def test_cp_resumable_download_check_hashes_never(self): |
| 1825 """Tests that resumble downloads work with check_hashes = never.""" |
| 1826 bucket_uri = self.CreateBucket() |
| 1827 contents = 'abcd' * self.halt_size |
| 1828 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 1829 contents=contents) |
| 1830 fpath = self.CreateTempFile() |
| 1831 test_callback_file = self.CreateTempFile( |
| 1832 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
| 1833 |
| 1834 boto_config_for_test = [('GSUtil', 'resumable_threshold', str(ONE_KIB)), |
| 1835 ('GSUtil', 'check_hashes', 'never')] |
| 1836 with SetBotoConfigForTest(boto_config_for_test): |
| 1837 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 1838 suri(object_uri), fpath], |
| 1839 expected_status=1, return_stderr=True) |
| 1840 self.assertIn('Artifically halting download.', stderr) |
| 1841 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
| 1842 return_stderr=True) |
| 1843 self.assertIn('Resuming download', stderr) |
| 1844 self.assertIn('Found no hashes to validate object downloaded', stderr) |
| 1845 with open(fpath, 'r') as f: |
| 1846 self.assertEqual(f.read(), contents, 'File contents did not match.') |
| 1847 |
1701 @SkipForS3('No resumable upload support for S3.') | 1848 @SkipForS3('No resumable upload support for S3.') |
1702 def test_cp_resumable_upload_bucket_deleted(self): | 1849 def test_cp_resumable_upload_bucket_deleted(self): |
1703 """Tests that a not found exception is raised if bucket no longer exists.""" | 1850 """Tests that a not found exception is raised if bucket no longer exists.""" |
1704 bucket_uri = self.CreateBucket() | 1851 bucket_uri = self.CreateBucket() |
1705 fpath = self.CreateTempFile(contents='a' * 2 * ONE_KIB) | 1852 fpath = self.CreateTempFile(contents='a' * 2 * ONE_KIB) |
1706 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 1853 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1707 test_callback_file = self.CreateTempFile( | 1854 test_callback_file = self.CreateTempFile( |
1708 contents=pickle.dumps( | 1855 contents=pickle.dumps( |
1709 _DeleteBucketThenStartOverCopyCallbackHandler(5, bucket_uri))) | 1856 _DeleteBucketThenStartOverCopyCallbackHandler(5, bucket_uri))) |
1710 | 1857 |
1711 with SetBotoConfigForTest([boto_config_for_test]): | 1858 with SetBotoConfigForTest([boto_config_for_test]): |
1712 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, | 1859 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
1713 fpath, suri(bucket_uri)], return_stderr=True, | 1860 fpath, suri(bucket_uri)], return_stderr=True, |
1714 expected_status=1) | 1861 expected_status=1) |
1715 self.assertIn('Deleting bucket', stderr) | 1862 self.assertIn('Deleting bucket', stderr) |
1716 self.assertIn('bucket does not exist', stderr) | 1863 self.assertIn('bucket does not exist', stderr) |
1717 | 1864 |
| 1865 @SkipForS3('No sliced download support for S3.') |
| 1866 def test_cp_sliced_download(self): |
| 1867 """Tests that sliced object download works in the general case.""" |
| 1868 bucket_uri = self.CreateBucket() |
| 1869 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 1870 contents='abc' * ONE_KIB) |
| 1871 fpath = self.CreateTempFile() |
| 1872 |
| 1873 # Force the fast crcmod check to return True to test the basic sliced |
| 1874 # download scenario, ensuring that if the user installs crcmod, it works. |
| 1875 boto_config_for_test = [ |
| 1876 ('GSUtil', 'resumable_threshold', str(ONE_KIB)), |
| 1877 ('GSUtil', 'test_assume_fast_crcmod', 'True'), |
| 1878 ('GSUtil', 'sliced_object_download_threshold', str(ONE_KIB)), |
| 1879 ('GSUtil', 'sliced_object_download_max_components', '3')] |
| 1880 |
| 1881 with SetBotoConfigForTest(boto_config_for_test): |
| 1882 self.RunGsUtil(['cp', suri(object_uri), fpath]) |
| 1883 |
| 1884 # Each tracker file should have been deleted. |
| 1885 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 1886 StorageUrlFromString(fpath), self.test_api) |
| 1887 for tracker_filename in tracker_filenames: |
| 1888 self.assertFalse(os.path.isfile(tracker_filename)) |
| 1889 |
| 1890 with open(fpath, 'r') as f: |
| 1891 self.assertEqual(f.read(), 'abc' * ONE_KIB, 'File contents differ') |
| 1892 |
| 1893 @unittest.skipUnless(UsingCrcmodExtension(crcmod), |
| 1894 'Sliced download requires fast crcmod.') |
| 1895 @SkipForS3('No sliced download support for S3.') |
| 1896 def test_cp_unresumable_sliced_download(self): |
| 1897 """Tests sliced download works when resumability is disabled.""" |
| 1898 bucket_uri = self.CreateBucket() |
| 1899 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 1900 contents='abcd' * self.halt_size) |
| 1901 fpath = self.CreateTempFile() |
| 1902 test_callback_file = self.CreateTempFile( |
| 1903 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
| 1904 |
| 1905 boto_config_for_test = [ |
| 1906 ('GSUtil', 'resumable_threshold', str(self.halt_size*5)), |
| 1907 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 1908 ('GSUtil', 'sliced_object_download_max_components', '4')] |
| 1909 |
| 1910 with SetBotoConfigForTest(boto_config_for_test): |
| 1911 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 1912 suri(object_uri), suri(fpath)], |
| 1913 return_stderr=True, expected_status=1) |
| 1914 self.assertIn('not downloaded successfully', stderr) |
| 1915 # Temporary download file should exist. |
| 1916 self.assertTrue(os.path.isfile(fpath + '_.gstmp')) |
| 1917 |
| 1918 # No tracker files should exist. |
| 1919 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 1920 StorageUrlFromString(fpath), self.test_api) |
| 1921 for tracker_filename in tracker_filenames: |
| 1922 self.assertFalse(os.path.isfile(tracker_filename)) |
| 1923 |
| 1924 # Perform the entire download, without resuming. |
| 1925 with SetBotoConfigForTest(boto_config_for_test): |
| 1926 stderr = self.RunGsUtil(['cp', suri(object_uri), suri(fpath)], |
| 1927 return_stderr=True) |
| 1928 self.assertNotIn('Resuming download', stderr) |
| 1929 # Temporary download file should have been deleted. |
| 1930 self.assertFalse(os.path.isfile(fpath + '_.gstmp')) |
| 1931 with open(fpath, 'r') as f: |
| 1932 self.assertEqual(f.read(), 'abcd' * self.halt_size, |
| 1933 'File contents differ') |
| 1934 |
| 1935 @unittest.skipUnless(UsingCrcmodExtension(crcmod), |
| 1936 'Sliced download requires fast crcmod.') |
| 1937 @SkipForS3('No sliced download support for S3.') |
| 1938 def test_cp_sliced_download_resume(self): |
| 1939 """Tests that sliced object download is resumable.""" |
| 1940 bucket_uri = self.CreateBucket() |
| 1941 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 1942 contents='abc' * self.halt_size) |
| 1943 fpath = self.CreateTempFile() |
| 1944 test_callback_file = self.CreateTempFile( |
| 1945 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
| 1946 |
| 1947 boto_config_for_test = [ |
| 1948 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 1949 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 1950 ('GSUtil', 'sliced_object_download_max_components', '3')] |
| 1951 |
| 1952 with SetBotoConfigForTest(boto_config_for_test): |
| 1953 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 1954 suri(object_uri), suri(fpath)], |
| 1955 return_stderr=True, expected_status=1) |
| 1956 self.assertIn('not downloaded successfully', stderr) |
| 1957 |
| 1958 # Each tracker file should exist. |
| 1959 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 1960 StorageUrlFromString(fpath), self.test_api) |
| 1961 for tracker_filename in tracker_filenames: |
| 1962 self.assertTrue(os.path.isfile(tracker_filename)) |
| 1963 |
| 1964 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
| 1965 return_stderr=True) |
| 1966 self.assertIn('Resuming download', stderr) |
| 1967 |
| 1968 # Each tracker file should have been deleted. |
| 1969 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 1970 StorageUrlFromString(fpath), self.test_api) |
| 1971 for tracker_filename in tracker_filenames: |
| 1972 self.assertFalse(os.path.isfile(tracker_filename)) |
| 1973 |
| 1974 with open(fpath, 'r') as f: |
| 1975 self.assertEqual(f.read(), 'abc' * self.halt_size, |
| 1976 'File contents differ') |
| 1977 |
| 1978 @unittest.skipUnless(UsingCrcmodExtension(crcmod), |
| 1979 'Sliced download requires fast crcmod.') |
| 1980 @SkipForS3('No sliced download support for S3.') |
| 1981 def test_cp_sliced_download_partial_resume(self): |
| 1982 """Test sliced download resumability when some components are finished.""" |
| 1983 bucket_uri = self.CreateBucket() |
| 1984 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 1985 contents='abc' * self.halt_size) |
| 1986 fpath = self.CreateTempFile() |
| 1987 test_callback_file = self.CreateTempFile( |
| 1988 contents=pickle.dumps(_HaltOneComponentCopyCallbackHandler(5))) |
| 1989 |
| 1990 boto_config_for_test = [ |
| 1991 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 1992 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 1993 ('GSUtil', 'sliced_object_download_max_components', '3')] |
| 1994 |
| 1995 with SetBotoConfigForTest(boto_config_for_test): |
| 1996 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 1997 suri(object_uri), suri(fpath)], |
| 1998 return_stderr=True, expected_status=1) |
| 1999 self.assertIn('not downloaded successfully', stderr) |
| 2000 |
| 2001 # Each tracker file should exist. |
| 2002 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 2003 StorageUrlFromString(fpath), self.test_api) |
| 2004 for tracker_filename in tracker_filenames: |
| 2005 self.assertTrue(os.path.isfile(tracker_filename)) |
| 2006 |
| 2007 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
| 2008 return_stderr=True) |
| 2009 self.assertIn('Resuming download', stderr) |
| 2010 self.assertIn('Download already complete', stderr) |
| 2011 |
| 2012 # Each tracker file should have been deleted. |
| 2013 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 2014 StorageUrlFromString(fpath), self.test_api) |
| 2015 for tracker_filename in tracker_filenames: |
| 2016 self.assertFalse(os.path.isfile(tracker_filename)) |
| 2017 |
| 2018 with open(fpath, 'r') as f: |
| 2019 self.assertEqual(f.read(), 'abc' * self.halt_size, |
| 2020 'File contents differ') |
| 2021 |
| 2022 @unittest.skipUnless(UsingCrcmodExtension(crcmod), |
| 2023 'Sliced download requires fast crcmod.') |
| 2024 @SkipForS3('No sliced download support for S3.') |
| 2025 def test_cp_sliced_download_resume_content_differs(self): |
| 2026 """Tests differing file contents are detected by sliced downloads.""" |
| 2027 bucket_uri = self.CreateBucket() |
| 2028 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 2029 contents='abc' * self.halt_size) |
| 2030 fpath = self.CreateTempFile(contents='') |
| 2031 test_callback_file = self.CreateTempFile( |
| 2032 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
| 2033 |
| 2034 boto_config_for_test = [ |
| 2035 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 2036 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 2037 ('GSUtil', 'sliced_object_download_max_components', '3')] |
| 2038 |
| 2039 with SetBotoConfigForTest(boto_config_for_test): |
| 2040 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 2041 suri(object_uri), suri(fpath)], |
| 2042 return_stderr=True, expected_status=1) |
| 2043 self.assertIn('not downloaded successfully', stderr) |
| 2044 |
| 2045 # Temporary download file should exist. |
| 2046 self.assertTrue(os.path.isfile(fpath + '_.gstmp')) |
| 2047 |
| 2048 # Each tracker file should exist. |
| 2049 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 2050 StorageUrlFromString(fpath), self.test_api) |
| 2051 for tracker_filename in tracker_filenames: |
| 2052 self.assertTrue(os.path.isfile(tracker_filename)) |
| 2053 |
| 2054 with open(fpath + '_.gstmp', 'r+b') as f: |
| 2055 f.write('altered file contents') |
| 2056 |
| 2057 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
| 2058 return_stderr=True, expected_status=1) |
| 2059 self.assertIn('Resuming download', stderr) |
| 2060 self.assertIn('doesn\'t match cloud-supplied digest', stderr) |
| 2061 self.assertIn('HashMismatchException: crc32c', stderr) |
| 2062 |
| 2063 # Each tracker file should have been deleted. |
| 2064 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 2065 StorageUrlFromString(fpath), self.test_api) |
| 2066 for tracker_filename in tracker_filenames: |
| 2067 self.assertFalse(os.path.isfile(tracker_filename)) |
| 2068 |
| 2069 # Temporary file should have been deleted due to hash mismatch. |
| 2070 self.assertFalse(os.path.isfile(fpath + '_.gstmp')) |
| 2071 # Final file should not exist. |
| 2072 self.assertFalse(os.path.isfile(fpath)) |
| 2073 |
| 2074 @unittest.skipUnless(UsingCrcmodExtension(crcmod), |
| 2075 'Sliced download requires fast crcmod.') |
| 2076 @SkipForS3('No sliced download support for S3.') |
| 2077 def test_cp_sliced_download_component_size_changed(self): |
| 2078 """Tests sliced download doesn't break when the boto config changes. |
| 2079 |
| 2080 If the number of components used changes cross-process, the download should |
| 2081 be restarted. |
| 2082 """ |
| 2083 bucket_uri = self.CreateBucket() |
| 2084 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 2085 contents='abcd' * self.halt_size) |
| 2086 fpath = self.CreateTempFile() |
| 2087 test_callback_file = self.CreateTempFile( |
| 2088 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
| 2089 |
| 2090 boto_config_for_test = [ |
| 2091 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 2092 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 2093 ('GSUtil', 'sliced_object_download_component_size', |
| 2094 str(self.halt_size//4)), |
| 2095 ('GSUtil', 'sliced_object_download_max_components', '4')] |
| 2096 |
| 2097 with SetBotoConfigForTest(boto_config_for_test): |
| 2098 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 2099 suri(object_uri), suri(fpath)], |
| 2100 return_stderr=True, expected_status=1) |
| 2101 self.assertIn('not downloaded successfully', stderr) |
| 2102 |
| 2103 boto_config_for_test = [ |
| 2104 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 2105 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 2106 ('GSUtil', 'sliced_object_download_component_size', |
| 2107 str(self.halt_size//2)), |
| 2108 ('GSUtil', 'sliced_object_download_max_components', '2')] |
| 2109 |
| 2110 with SetBotoConfigForTest(boto_config_for_test): |
| 2111 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
| 2112 return_stderr=True) |
| 2113 self.assertIn('Sliced download tracker file doesn\'t match ', stderr) |
| 2114 self.assertIn('Restarting download from scratch', stderr) |
| 2115 self.assertNotIn('Resuming download', stderr) |
| 2116 |
| 2117 @unittest.skipUnless(UsingCrcmodExtension(crcmod), |
| 2118 'Sliced download requires fast crcmod.') |
| 2119 @SkipForS3('No sliced download support for S3.') |
| 2120 def test_cp_sliced_download_disabled_cross_process(self): |
| 2121 """Tests temporary files are not orphaned if sliced download is disabled. |
| 2122 |
| 2123 Specifically, temporary files should be deleted when the corresponding |
| 2124 non-sliced download is completed. |
| 2125 """ |
| 2126 bucket_uri = self.CreateBucket() |
| 2127 object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo', |
| 2128 contents='abcd' * self.halt_size) |
| 2129 fpath = self.CreateTempFile() |
| 2130 test_callback_file = self.CreateTempFile( |
| 2131 contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5))) |
| 2132 |
| 2133 boto_config_for_test = [ |
| 2134 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 2135 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size)), |
| 2136 ('GSUtil', 'sliced_object_download_max_components', '4')] |
| 2137 |
| 2138 with SetBotoConfigForTest(boto_config_for_test): |
| 2139 stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file, |
| 2140 suri(object_uri), suri(fpath)], |
| 2141 return_stderr=True, expected_status=1) |
| 2142 self.assertIn('not downloaded successfully', stderr) |
| 2143 # Temporary download file should exist. |
| 2144 self.assertTrue(os.path.isfile(fpath + '_.gstmp')) |
| 2145 |
| 2146 # Each tracker file should exist. |
| 2147 tracker_filenames = GetSlicedDownloadTrackerFilePaths( |
| 2148 StorageUrlFromString(fpath), self.test_api) |
| 2149 for tracker_filename in tracker_filenames: |
| 2150 self.assertTrue(os.path.isfile(tracker_filename)) |
| 2151 |
| 2152 # Disable sliced downloads by increasing the threshold. |
| 2153 boto_config_for_test = [ |
| 2154 ('GSUtil', 'resumable_threshold', str(self.halt_size)), |
| 2155 ('GSUtil', 'sliced_object_download_threshold', str(self.halt_size*5)), |
| 2156 ('GSUtil', 'sliced_object_download_max_components', '4')] |
| 2157 |
| 2158 with SetBotoConfigForTest(boto_config_for_test): |
| 2159 stderr = self.RunGsUtil(['cp', suri(object_uri), fpath], |
| 2160 return_stderr=True) |
| 2161 self.assertNotIn('Resuming download', stderr) |
| 2162 # Temporary download file should have been deleted. |
| 2163 self.assertFalse(os.path.isfile(fpath + '_.gstmp')) |
| 2164 |
| 2165 # Each tracker file should have been deleted. |
| 2166 for tracker_filename in tracker_filenames: |
| 2167 self.assertFalse(os.path.isfile(tracker_filename)) |
| 2168 with open(fpath, 'r') as f: |
| 2169 self.assertEqual(f.read(), 'abcd' * self.halt_size) |
| 2170 |
1718 @SkipForS3('No resumable upload support for S3.') | 2171 @SkipForS3('No resumable upload support for S3.') |
1719 def test_cp_resumable_upload_start_over_http_error(self): | 2172 def test_cp_resumable_upload_start_over_http_error(self): |
1720 for start_over_error in (404, 410): | 2173 for start_over_error in (404, 410): |
1721 self.start_over_error_test_helper(start_over_error) | 2174 self.start_over_error_test_helper(start_over_error) |
1722 | 2175 |
1723 def start_over_error_test_helper(self, http_error_num): | 2176 def start_over_error_test_helper(self, http_error_num): |
1724 bucket_uri = self.CreateBucket() | 2177 bucket_uri = self.CreateBucket() |
1725 fpath = self.CreateTempFile(contents='a' * 2 * ONE_KIB) | 2178 fpath = self.CreateTempFile(contents='a' * 2 * ONE_KIB) |
1726 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) | 2179 boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB)) |
1727 if self.test_api == ApiSelector.JSON: | 2180 if self.test_api == ApiSelector.JSON: |
(...skipping 284 matching lines...) |
2012 def test_cp_upload_respects_no_hashes(self): | 2465 def test_cp_upload_respects_no_hashes(self): |
2013 bucket_uri = self.CreateBucket() | 2466 bucket_uri = self.CreateBucket() |
2014 fpath = self.CreateTempFile(contents='abcd') | 2467 fpath = self.CreateTempFile(contents='abcd') |
2015 with SetBotoConfigForTest([('GSUtil', 'check_hashes', 'never')]): | 2468 with SetBotoConfigForTest([('GSUtil', 'check_hashes', 'never')]): |
2016 log_handler = self.RunCommand('cp', [fpath, suri(bucket_uri)], | 2469 log_handler = self.RunCommand('cp', [fpath, suri(bucket_uri)], |
2017 return_log_handler=True) | 2470 return_log_handler=True) |
2018 warning_messages = log_handler.messages['warning'] | 2471 warning_messages = log_handler.messages['warning'] |
2019 self.assertEquals(1, len(warning_messages)) | 2472 self.assertEquals(1, len(warning_messages)) |
2020 self.assertIn('Found no hashes to validate object upload', | 2473 self.assertIn('Found no hashes to validate object upload', |
2021 warning_messages[0]) | 2474 warning_messages[0]) |