Chromium Code Reviews

Unified diff: py/utils/gs_utils_manualtest.py

Issue 420553002: gs_utils: when uploading a whole dir using IF_NEW, check for existence of multiple files in a singl… (Closed) Base URL: https://skia.googlesource.com/common.git@master
Patch Set: check for existence of multiple files at once (created 6 years, 5 months ago)
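The test file below exercises the behavior this change targets: upload_dir_contents() with upload_if=UploadIf.IF_NEW should skip files that already exist remotely, and per the issue title the existence check for a whole directory is now done in a single request rather than one request per file. As a rough, hypothetical sketch of that idea (not the actual gs_utils.py implementation; the helper name _files_missing_remotely is made up, and the assumption that list_bucket_contents() returns a (directories, files) pair may not match the real API):

import os

def _files_missing_remotely(gs, source_dir, dest_bucket, dest_dir):
  """Hypothetical helper: one listing call instead of N existence checks.

  Assumes gs is a GSUtils-like handle and that list_bucket_contents()
  returns (subdirectories, files) for the given bucket and subdir.
  """
  _remote_dirs, remote_files = gs.list_bucket_contents(
      bucket=dest_bucket, subdir=dest_dir)
  existing = set(remote_files)
  # Under IF_NEW, only files that are not already present remotely
  # need to be uploaded.
  return [name for name in sorted(os.listdir(source_dir))
          if name not in existing]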
 #!/usr/bin/python
 
 """Tests for gs_utils.py.
 
 TODO(epoger): How should we exercise these self-tests? See http://skbug.com/2751
 """
 
 # System-level imports.
 import os
 import posixpath
(...skipping 27 matching lines...)
   """Returns a unique directory name suitable for use in Google Storage."""
   return 'gs_utils_manualtest/%d' % random.randint(0, sys.maxint)
 
 
 def _test_public_read():
   """Make sure we can read from public files without .boto file credentials."""
   gs = gs_utils.GSUtils()
   gs.list_bucket_contents(bucket=TEST_BUCKET, subdir=None)
 
 
-def _test_upload_if():
+def _test_upload_if_one_file():
   """Test upload_if param within upload_file()."""
   gs = _get_authenticated_gs_handle()
   filename = 'filename'
   remote_dir = _get_unique_posix_dir()
   dest_path = posixpath.join(remote_dir, filename)
   local_dir = tempfile.mkdtemp()
   try:
     # Create a file on local disk, and upload it for the first time.
     local_path = os.path.join(local_dir, filename)
     with open(local_path, 'w') as f:
(...skipping 47 matching lines...)
           bucket=TEST_BUCKET, path=dest_path)
       assert old_timestamp != new_timestamp, '%s != %s' % (
           old_timestamp, new_timestamp)
     finally:
       # Clean up the remote_dir.
       gs.delete_file(bucket=TEST_BUCKET, path=dest_path)
   finally:
     # Clean up the local dir.
     shutil.rmtree(local_dir)
 
+
+def _test_upload_if_multiple_files():
+  """Test upload_if param within upload_dir_contents()."""
+  gs = _get_authenticated_gs_handle()
+  subdir = 'subdir'
+  filenames = ['file1', 'file2']
+  local_dir = tempfile.mkdtemp()
+  remote_dir = _get_unique_posix_dir()
+  sample_file_local_path = os.path.join(local_dir, subdir, filenames[0])
+  sample_file_remote_path = posixpath.join(remote_dir, subdir, filenames[0])
+  try:
+    # Create files on local disk, and upload them for the first time.
+    os.mkdir(os.path.join(local_dir, subdir))
+    for filename in filenames:
+      with open(os.path.join(local_dir, subdir, filename), 'w') as f:
+        f.write('original contents of %s' % filename)
+    gs.upload_dir_contents(
+        source_dir=local_dir, dest_bucket=TEST_BUCKET,
+        dest_dir=remote_dir, upload_if=gs.UploadIf.IF_NEW)
+    try:
+      # Re-upload the same files, with upload_if=gs.UploadIf.ALWAYS;
+      # the timestamps should change.
+      old_timestamp = gs.get_last_modified_time(
+          bucket=TEST_BUCKET, path=sample_file_remote_path)
+      time.sleep(2)
+      gs.upload_dir_contents(
+          source_dir=local_dir, dest_bucket=TEST_BUCKET,
+          dest_dir=remote_dir, upload_if=gs.UploadIf.ALWAYS)
+      new_timestamp = gs.get_last_modified_time(
+          bucket=TEST_BUCKET, path=sample_file_remote_path)
+      assert old_timestamp != new_timestamp, '%s != %s' % (
+          old_timestamp, new_timestamp)
+
+      # Re-upload the same files, with upload_if=gs.UploadIf.IF_MODIFIED;
+      # the timestamps should NOT change.
+      old_timestamp = new_timestamp
+      time.sleep(2)
+      gs.upload_dir_contents(
+          source_dir=local_dir, dest_bucket=TEST_BUCKET,
+          dest_dir=remote_dir, upload_if=gs.UploadIf.IF_MODIFIED)
+      new_timestamp = gs.get_last_modified_time(
+          bucket=TEST_BUCKET, path=sample_file_remote_path)
+      assert old_timestamp == new_timestamp, '%s == %s' % (
+          old_timestamp, new_timestamp)
+
+      # Modify and re-upload the files, with upload_if=gs.UploadIf.IF_NEW;
+      # the timestamps should still not change.
+      old_timestamp = new_timestamp
+      with open(sample_file_local_path, 'w') as f:
+        f.write('modified contents of sample file')
+      time.sleep(2)
+      gs.upload_dir_contents(
+          source_dir=local_dir, dest_bucket=TEST_BUCKET,
+          dest_dir=remote_dir, upload_if=gs.UploadIf.IF_NEW)
+      new_timestamp = gs.get_last_modified_time(
+          bucket=TEST_BUCKET, path=sample_file_remote_path)
+      assert old_timestamp == new_timestamp, '%s == %s' % (
+          old_timestamp, new_timestamp)
+
+      # Re-upload the modified file, with upload_if=gs.UploadIf.IF_MODIFIED;
+      # now the timestamp SHOULD change.
+      old_timestamp = new_timestamp
+      time.sleep(2)
+      gs.upload_dir_contents(
+          source_dir=local_dir, dest_bucket=TEST_BUCKET,
+          dest_dir=remote_dir, upload_if=gs.UploadIf.IF_MODIFIED)
+      new_timestamp = gs.get_last_modified_time(
+          bucket=TEST_BUCKET, path=sample_file_remote_path)
+      assert old_timestamp != new_timestamp, '%s != %s' % (
+          old_timestamp, new_timestamp)
+    finally:
+      # Delete all the files we uploaded to Google Storage.
+      for filename in filenames:
+        gs.delete_file(bucket=TEST_BUCKET,
+                       path=posixpath.join(remote_dir, subdir, filename))
+  finally:
+    # Clean up the local dir.
+    shutil.rmtree(local_dir)
+
+
 def _test_authenticated_round_trip():
   gs = _get_authenticated_gs_handle()
   remote_dir = _get_unique_posix_dir()
   subdir = 'subdir'
   filenames_to_upload = ['file1', 'file2']
 
   # Upload test files to Google Storage, checking that their fine-grained
   # ACLs were set correctly.
   id_type = gs.IdType.GROUP_BY_DOMAIN
   id_value = 'chromium.org'
(...skipping 142 matching lines...)
       assert file_contents == 'contents of %s\n' % filename, (
           '%s == "contents of %s\n"' % (file_contents, filename))
   finally:
     shutil.rmtree(local_dest_dir)
     for filename in filenames:
       gs.delete_file(bucket=TEST_BUCKET,
                      path=posixpath.join(remote_dir, subdir, filename))
 
 
 if __name__ == '__main__':
-  _test_upload_if()
+  _test_upload_if_multiple_files()
+  _test_upload_if_one_file()
   _test_public_read()
   _test_authenticated_round_trip()
   _test_dir_upload_and_download()
   # TODO(epoger): Add _test_unauthenticated_access() to make sure we raise
   # an exception when we try to access without needed credentials.
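As the TODO at the top of the file notes, these are manual self-tests rather than part of an automated suite. Assuming .boto credentials are available for the authenticated cases (only the public-read test works without them), the whole set can be run directly from the repository root:

python py/utils/gs_utils_manualtest.py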