OLD | NEW |
1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import os | 5 import os |
6 import re | 6 import re |
7 import sys | 7 import sys |
8 | 8 |
9 | 9 |
10 def LoadSupport(input_api): | 10 def LoadSupport(input_api): |
(...skipping 21 matching lines...) |
32 for affected_file in input_api.AffectedFiles(include_deletes=False): | 32 for affected_file in input_api.AffectedFiles(include_deletes=False): |
33 hash_path = affected_file.AbsoluteLocalPath() | 33 hash_path = affected_file.AbsoluteLocalPath() |
34 _, extension = os.path.splitext(hash_path) | 34 _, extension = os.path.splitext(hash_path) |
35 if extension == '.sha1': | 35 if extension == '.sha1': |
36 hash_paths.append(hash_path) | 36 hash_paths.append(hash_path) |
37 if not hash_paths: | 37 if not hash_paths: |
38 return [] | 38 return [] |
39 | 39 |
40 cloud_storage = LoadSupport(input_api) | 40 cloud_storage = LoadSupport(input_api) |
41 | 41 |
42 # Look in both buckets, in case the user uploaded the file manually. But this | 42 # Look in all buckets, in case the user uploaded the file manually. But this |
43 # script focuses on WPR archives, so it only uploads to the internal bucket. | 43 # script focuses on WPR archives, so it only uploads to the internal bucket. |
44 hashes_in_cloud_storage = cloud_storage.List(cloud_storage.PUBLIC_BUCKET) | 44 hashes_in_cloud_storage = cloud_storage.List(cloud_storage.PUBLIC_BUCKET) |
45 try: | 45 try: |
| 46 hashes_in_cloud_storage += cloud_storage.List(cloud_storage.PARTNER_BUCKET) |
46 hashes_in_cloud_storage += cloud_storage.List(cloud_storage.INTERNAL_BUCKET) | 47 hashes_in_cloud_storage += cloud_storage.List(cloud_storage.INTERNAL_BUCKET) |
47 except (cloud_storage.PermissionError, cloud_storage.CredentialsError): | 48 except (cloud_storage.PermissionError, cloud_storage.CredentialsError): |
48 pass | 49 pass |
49 | 50 |
50 files = [] | 51 files = [] |
51 for hash_path in hash_paths: | 52 for hash_path in hash_paths: |
52 file_hash = cloud_storage.ReadHash(hash_path) | 53 file_hash = cloud_storage.ReadHash(hash_path) |
53 if file_hash not in hashes_in_cloud_storage: | 54 if file_hash not in hashes_in_cloud_storage: |
54 files.append((hash_path, file_hash)) | 55 files.append((hash_path, file_hash)) |
55 | 56 |
(...skipping 70 matching lines...) |
126 return input_api.AffectedFiles(file_filter=_IsNewJsonPageSet) | 127 return input_api.AffectedFiles(file_filter=_IsNewJsonPageSet) |
127 | 128 |
128 def CheckChangeOnUpload(input_api, output_api): | 129 def CheckChangeOnUpload(input_api, output_api): |
129 results = _SyncFilesToCloud(input_api, output_api) | 130 results = _SyncFilesToCloud(input_api, output_api) |
130 return results | 131 return results |
131 | 132 |
132 | 133 |
133 def CheckChangeOnCommit(input_api, output_api): | 134 def CheckChangeOnCommit(input_api, output_api): |
134 results = _VerifyFilesInCloud(input_api, output_api) | 135 results = _VerifyFilesInCloud(input_api, output_api) |
135 return results | 136 return results |