Chromium Code Reviews

Unified Diff: tools/perf/page_sets/PRESUBMIT.py

Issue 126093006: [telemetry] Allow public page set data and WPR archives. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 6 years, 11 months ago
--- a/tools/perf/page_sets/PRESUBMIT.py
+++ b/tools/perf/page_sets/PRESUBMIT.py
 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 import os
 import re
 import sys


 def LoadSupport(input_api):
   if 'cloud_storage' not in globals():
     # Avoid leaking changes to global sys.path.
     _old_sys_path = sys.path
     try:
       telemetry_path = os.path.join(os.path.dirname(os.path.dirname(
           input_api.PresubmitLocalPath())), 'telemetry')
       sys.path = [telemetry_path] + sys.path
       from telemetry.page import cloud_storage
       globals()['cloud_storage'] = cloud_storage
     finally:
       sys.path = _old_sys_path

   return globals()['cloud_storage']

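LoadSupport lazily imports telemetry's cloud_storage module, memoizing it in globals() so later calls reuse it, and temporarily prepends the telemetry directory to sys.path. A minimal standalone sketch of the same save/prepend/restore idiom (the directory and function name here are illustrative placeholders, not part of this CL):

    import sys

    def import_from(directory, module_name):
      # Prepend the directory, import, then restore sys.path in a finally
      # block so the change cannot leak out, even if the import raises.
      old_sys_path = sys.path
      try:
        sys.path = [directory] + sys.path
        return __import__(module_name)
      finally:
        sys.path = old_sys_path

    # e.g. telemetry = import_from('/path/to/tools/telemetry', 'telemetry')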
-def _SyncFilesToCloud(input_api, output_api):
-  """Searches for .sha1 files and uploads them to Cloud Storage.
-
-  It validates all the hashes and skips upload if not necessary.
-  """
+def _GetFilesNotInCloud(input_api):
+  """Searches for .sha1 files whose data files are not in Cloud Storage.
+
+  Returns a list of (hash_path, file_hash) pairs that still need uploading.
+  """

   cloud_storage = LoadSupport(input_api)

-  # Look in both buckets, in case the user uploaded the file manually. But this
-  # script focuses on WPR archives, so it only uploads to the internal bucket.
-  hashes_in_cloud_storage = cloud_storage.List(cloud_storage.INTERNAL_BUCKET)
-  hashes_in_cloud_storage += cloud_storage.List(cloud_storage.PUBLIC_BUCKET)
+  # Look in both buckets, in case the user uploaded the file manually. The
+  # internal bucket may not be readable without Google credentials.
+  hashes_in_cloud_storage = cloud_storage.List(cloud_storage.PUBLIC_BUCKET)
+  try:
+    hashes_in_cloud_storage += cloud_storage.List(cloud_storage.INTERNAL_BUCKET)
+  except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
+    pass

-  results = []
+  files = []
   for affected_file in input_api.AffectedFiles(include_deletes=False):
     hash_path = affected_file.AbsoluteLocalPath()
-    file_path, extension = os.path.splitext(hash_path)
+    _, extension = os.path.splitext(hash_path)
     if extension != '.sha1':
       continue

     with open(hash_path, 'rb') as f:
       file_hash = f.read(1024).rstrip()
-    if file_hash in hashes_in_cloud_storage:
-      results.append(output_api.PresubmitNotifyResult(
-          'File already in Cloud Storage, skipping upload: %s' % hash_path))
-      continue
+    if file_hash not in hashes_in_cloud_storage:
+      files.append((hash_path, file_hash))
+
+  return files
+
+
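_GetFilesNotInCloud assumes each data file foo.wpr is checked in alongside a hash file foo.wpr.sha1 containing the hex SHA-1 digest of the data. A sketch of producing such a hash file, for reference; write_hash_file is a hypothetical helper, not part of this CL:

    import hashlib

    def write_hash_file(data_path):
      # Hash the data file in chunks and write the 40-character hex digest
      # to a sibling '<data_path>.sha1' file, the format the presubmit
      # reads back with f.read(1024).rstrip().
      digest = hashlib.sha1()
      with open(data_path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
          digest.update(chunk)
      with open(data_path + '.sha1', 'w') as f:
        f.write(digest.hexdigest())
      return digest.hexdigest()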
+def _SyncFilesToCloud(input_api, output_api):
+  """Searches for .sha1 files and uploads them to Cloud Storage.
+
+  It validates all the hashes and skips upload if not necessary.
+  """
+
+  cloud_storage = LoadSupport(input_api)
+
+  results = []
+  for hash_path, file_hash in _GetFilesNotInCloud(input_api):
+    file_path, _ = os.path.splitext(hash_path)

     if not re.match('^([A-Za-z0-9]{40})$', file_hash):
       results.append(output_api.PresubmitError(
           'Hash file does not contain a valid SHA-1 hash: %s' % hash_path))
       continue
     if not os.path.exists(file_path):
       results.append(output_api.PresubmitError(
           'Hash file exists, but file not found: %s' % hash_path))
       continue
     if cloud_storage.GetHash(file_path) != file_hash:
       results.append(output_api.PresubmitError(
           'Hash file does not match file\'s actual hash: %s' % hash_path))
       continue

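This validation chain rejects a hash file before any network work: the contents must look like a SHA-1 digest, the data file must exist locally, and its recomputed hash must match. One observation: the character class [A-Za-z0-9]{40} is looser than hexadecimal, so a strictly hex-only check would be [A-Fa-f0-9]{40}. A small illustration:

    import re

    # The pattern as written accepts any 40 alphanumerics, so 'z' * 40
    # slips through even though it cannot be a hex SHA-1 digest.
    loose = re.compile('^([A-Za-z0-9]{40})$')
    strict = re.compile('^([A-Fa-f0-9]{40})$')

    fake_hash = 'z' * 40
    real_hash = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'  # SHA-1 of ''

    assert loose.match(fake_hash) and not strict.match(fake_hash)
    assert loose.match(real_hash) and strict.match(real_hash)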
     try:
-      cloud_storage.Insert(cloud_storage.INTERNAL_BUCKET, file_hash, file_path)
+      bucket_input = raw_input('Uploading to Cloud Storage: %s\nIs this file '
+                               '[p]ublic or Google-[i]nternal? ' % hash_path).lower()
+      if 'public'.startswith(bucket_input):
+        bucket = cloud_storage.PUBLIC_BUCKET
+      elif ('internal'.startswith(bucket_input) or
+            'google-internal'.startswith(bucket_input)):
+        bucket = cloud_storage.INTERNAL_BUCKET
+      else:
+        results.append(output_api.PresubmitError(
+            'Response was neither "public" nor "internal": %s' % bucket_input))
+        return results
+
+      cloud_storage.Insert(bucket, file_hash, file_path)
       results.append(output_api.PresubmitNotifyResult(
           'Uploaded file to Cloud Storage: %s' % hash_path))
     except cloud_storage.CloudStorageError, e:
       results.append(output_api.PresubmitError(
           'Unable to upload to Cloud Storage: %s\n\n%s' % (hash_path, e)))

   return results


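The upload path now asks interactively whether the archive is public or Google-internal, treating the response as a prefix of 'public' or 'internal'. A minimal sketch of that matching in isolation (choose_bucket and the bucket values are hypothetical stand-ins); note that an empty response is a prefix of 'public' and therefore selects the public bucket:

    def choose_bucket(response, public_bucket, internal_bucket):
      # Mirrors the prefix matching above: 'p', 'pub', 'public' pick the
      # public bucket; 'i', 'internal', 'google-internal' pick the
      # internal one; anything else is rejected by returning None.
      response = response.lower()
      if 'public'.startswith(response):
        return public_bucket
      if ('internal'.startswith(response) or
          'google-internal'.startswith(response)):
        return internal_bucket
      return None

    assert choose_bucket('p', 'pub-bucket', 'int-bucket') == 'pub-bucket'
    assert choose_bucket('I', 'pub-bucket', 'int-bucket') == 'int-bucket'
    assert choose_bucket('x', 'pub-bucket', 'int-bucket') is None
    assert choose_bucket('', 'pub-bucket', 'int-bucket') == 'pub-bucket'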
+def _VerifyFilesInCloud(input_api, output_api):
+  """Searches for .sha1 files and errors if their data files are not in
+  Cloud Storage.
+  """
+  results = []
+  for hash_path, _ in _GetFilesNotInCloud(input_api):
+    results.append(output_api.PresubmitError(
+        'Attempting to commit hash file, but corresponding '
+        'data file is not in Cloud Storage: %s' % hash_path))
+  return results
+
+
 def CheckChangeOnUpload(input_api, output_api):
   return _SyncFilesToCloud(input_api, output_api)


 def CheckChangeOnCommit(input_api, output_api):
-  return _SyncFilesToCloud(input_api, output_api)
+  return _VerifyFilesInCloud(input_api, output_api)
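With this change, the interactive upload runs at upload time via CheckChangeOnUpload, while CheckChangeOnCommit only verifies that every hash file's data is already in Cloud Storage, so no prompt can block a commit. Presubmit hooks don't print or raise; they return a list of result objects that depot_tools renders. A rough sketch of that contract with stub objects (all names below are stand-ins, not the real presubmit API surface):

    class StubError(object):
      # Stands in for output_api.PresubmitError.
      def __init__(self, message):
        self.message = message

    class StubOutputApi(object):
      PresubmitError = StubError

    def check_change_on_commit(missing_hash_paths, output_api):
      # Same shape as _VerifyFilesInCloud, with the Cloud Storage lookup
      # replaced by a precomputed list of missing hash files.
      return [output_api.PresubmitError(
                  'Attempting to commit hash file, but corresponding '
                  'data file is not in Cloud Storage: %s' % path)
              for path in missing_hash_paths]

    results = check_change_on_commit(['data/foo.wpr.sha1'], StubOutputApi())
    assert results[0].message.endswith('data/foo.wpr.sha1')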
