Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(470)

Side by Side Diff: scripts/slave/chromium/archive_layout_test_results.py

Issue 2414153003: In archive_layout_test_results, only support archiving to GS. (Closed)
Patch Set: Rebased Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | scripts/slave/chromium/archive_layout_test_results_unittest.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """A tool to archive layout test results. 6 """A tool to archive layout test results.
7 7
8 To archive files on Google Storage, pass a GS bucket name via --gs-bucket. 8 To archive files on Google Storage, pass a GS bucket name via --gs-bucket.
9 To control access to archives, pass a value for --gs-acl (e.g. 'public-read', 9 To control access to archives, pass a value for --gs-acl (e.g. 'public-read',
10 see https://developers.google.com/storage/docs/accesscontrol#extension 10 see https://developers.google.com/storage/docs/accesscontrol#extension
(...skipping 18 matching lines...) Expand all
29 from common import chromium_utils 29 from common import chromium_utils
30 from slave import build_directory 30 from slave import build_directory
31 from slave import slave_utils 31 from slave import slave_utils
32 32
33 # Directory name, above the build directory, in which test results can be 33 # Directory name, above the build directory, in which test results can be
34 # found if no --results-dir option is given. 34 # found if no --results-dir option is given.
35 RESULT_DIR = 'layout-test-results' 35 RESULT_DIR = 'layout-test-results'
36 36
37 37
38 def _CollectArchiveFiles(output_dir): 38 def _CollectArchiveFiles(output_dir):
39 """Returns a pair of lists of file paths to archive. 39 """Returns a list of actual layout test result files to archive."""
40
41 The first list is all the actual results from the test run;
42 the second list is the diffs from the expected results.
43 """
44 actual_file_list = [] 40 actual_file_list = []
45 diff_file_list = []
46 41
47 for path, _, files in os.walk(output_dir): 42 for path, _, files in os.walk(output_dir):
48 rel_path = path[len(output_dir + '\\'):] 43 rel_path = path[len(output_dir + '\\'):]
49 for name in files: 44 for name in files:
50 if _IsActualResultFile(name): 45 if _IsActualResultFile(name):
51 actual_file_list.append(os.path.join(rel_path, name)) 46 actual_file_list.append(os.path.join(rel_path, name))
52 if _IsDiffFile(name):
53 diff_file_list.append(os.path.join(rel_path, name))
54 elif name.endswith('.json'): 47 elif name.endswith('.json'):
55 actual_file_list.append(os.path.join(rel_path, name)) 48 actual_file_list.append(os.path.join(rel_path, name))
56 49
57 if os.path.exists(os.path.join(output_dir, 'results.html')): 50 if os.path.exists(os.path.join(output_dir, 'results.html')):
58 actual_file_list.append('results.html') 51 actual_file_list.append('results.html')
59 52
60 if sys.platform == 'win32': 53 if sys.platform == 'win32':
61 if os.path.exists(os.path.join(output_dir, 'access_log.txt')): 54 if os.path.exists(os.path.join(output_dir, 'access_log.txt')):
62 actual_file_list.append('access_log.txt') 55 actual_file_list.append('access_log.txt')
63 if os.path.exists(os.path.join(output_dir, 'error_log.txt')): 56 if os.path.exists(os.path.join(output_dir, 'error_log.txt')):
64 actual_file_list.append('error_log.txt') 57 actual_file_list.append('error_log.txt')
65 58
66 return (actual_file_list, diff_file_list) 59 return actual_file_list
67 60
68 61
69 def _IsActualResultFile(name): 62 def _IsActualResultFile(name):
70 if '-stack.' in name or '-crash-log.' in name: 63 if '-stack.' in name or '-crash-log.' in name:
71 return True 64 return True
72 extension = os.path.splitext(name)[1] 65 extension = os.path.splitext(name)[1]
73 return ('-actual.' in name and extension in 66 return ('-actual.' in name and extension in
74 ('.txt', '.png', '.checksum', '.wav')) 67 ('.txt', '.png', '.checksum', '.wav'))
75 68
76 69
77 def _IsDiffFile(name):
78 return ('-wdiff.' in name or
79 '-expected.' in name or
80 name.endswith('-diff.txt') or
81 name.endswith('-diff.png'))
82
83
84 def _ArchiveFullLayoutTestResults(staging_dir, dest_dir, diff_file_list,
85 options):
86 # Copy the actual and diff files to the web server.
87 # Don't clobber the staging_dir in the MakeZip call so that it keeps the
88 # files from the previous MakeZip call on diff_file_list.
89 print "archiving results + diffs"
90 full_zip_file = chromium_utils.MakeZip(staging_dir,
91 'layout-test-results', diff_file_list, options.results_dir,
92 remove_archive_directory=False)[1]
93 slave_utils.CopyFileToArchiveHost(full_zip_file, dest_dir)
94
95 # Extract the files on the web server.
96 extract_dir = os.path.join(dest_dir, 'results')
97 print 'extracting zip file to %s' % extract_dir
98
99 if chromium_utils.IsWindows():
100 chromium_utils.ExtractZip(full_zip_file, extract_dir)
101 elif chromium_utils.IsLinux() or chromium_utils.IsMac():
102 remote_zip_file = os.path.join(dest_dir, os.path.basename(full_zip_file))
103 chromium_utils.SshExtractZip(archive_utils.Config.archive_host,
104 remote_zip_file, extract_dir)
105
106
def _CopyFileToArchiveHost(src, dest_dir):
  """Copies a single file to the archive host.

  Uses CopyFileToDir on Windows and SshCopyFiles on Linux/Mac.

  TODO: we will eventually want to change the code to upload the
  data to appengine.

  Args:
    src: full path to the src file.
    dest_dir: destination directory on the host.

  Raises:
    chromium_utils.ExternalError: if |src| does not exist.
    NotImplementedError: on an unsupported platform.
  """
  host = archive_utils.Config.archive_host
  if not os.path.exists(src):
    raise chromium_utils.ExternalError('Source path "%s" does not exist' % src)
  chromium_utils.MakeWorldReadable(src)
  if chromium_utils.IsWindows():
    chromium_utils.CopyFileToDir(src, dest_dir)
    return
  if chromium_utils.IsLinux() or chromium_utils.IsMac():
    chromium_utils.SshCopyFiles(src, host, dest_dir)
    return
  raise NotImplementedError(
      'Platform "%s" is not currently supported.' % sys.platform)
130
131
132 def _MaybeMakeDirectoryOnArchiveHost(dest_dir):
133 """A wrapper method to create a directory on the archive host.
134
135 It calls MaybeMakeDirectory on Windows and SshMakeDirectory on Linux/Mac.
136
137 Args:
138 dest_dir: destination directory on the host.
139 """
140 host = archive_utils.Config.archive_host
141 if chromium_utils.IsWindows():
142 chromium_utils.MaybeMakeDirectory(dest_dir)
143 print 'saving results to %s' % dest_dir
144 elif chromium_utils.IsLinux() or chromium_utils.IsMac():
145 chromium_utils.SshMakeDirectory(host, dest_dir)
146 print 'saving results to "%s" on "%s"' % (dest_dir, host)
147 else:
148 raise NotImplementedError(
149 'Platform "%s" is not currently supported.' % sys.platform)
150
151
152 def archive_layout(options): 70 def archive_layout(options):
153 chrome_dir = os.path.abspath(options.build_dir) 71 chrome_dir = os.path.abspath(options.build_dir)
154 results_dir_basename = os.path.basename(options.results_dir) 72 results_dir_basename = os.path.basename(options.results_dir)
155 if options.results_dir is not None: 73 if options.results_dir is not None:
156 options.results_dir = os.path.abspath(os.path.join(options.build_dir, 74 options.results_dir = os.path.abspath(os.path.join(options.build_dir,
157 options.results_dir)) 75 options.results_dir))
158 else: 76 else:
159 options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR) 77 options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
160 print 'Archiving results from %s' % options.results_dir 78 print 'Archiving results from %s' % options.results_dir
161 staging_dir = options.staging_dir or slave_utils.GetStagingDir(chrome_dir) 79 staging_dir = options.staging_dir or slave_utils.GetStagingDir(chrome_dir)
162 print 'Staging in %s' % staging_dir 80 print 'Staging in %s' % staging_dir
163 if not os.path.exists(staging_dir): 81 if not os.path.exists(staging_dir):
164 os.makedirs(staging_dir) 82 os.makedirs(staging_dir)
165 83
166 (actual_file_list, diff_file_list) = _CollectArchiveFiles(options.results_dir) 84 actual_file_list = _CollectArchiveFiles(options.results_dir)
167 zip_file = chromium_utils.MakeZip(staging_dir, 85 zip_file = chromium_utils.MakeZip(staging_dir,
168 results_dir_basename, 86 results_dir_basename,
169 actual_file_list, 87 actual_file_list,
170 options.results_dir)[1] 88 options.results_dir)[1]
171 # TODO(crbug.com/655202): Stop separately uploading failing_results.json. 89 # TODO(crbug.com/655202): Stop separately uploading failing_results.json.
172 full_results_json = os.path.join(options.results_dir, 'full_results.json') 90 full_results_json = os.path.join(options.results_dir, 'full_results.json')
173 failing_results_json = os.path.join(options.results_dir, 91 failing_results_json = os.path.join(options.results_dir,
174 'failing_results.json') 92 'failing_results.json')
175 93
176 # Extract the build name of this slave (e.g., 'chrome-release') from its 94 # Extract the build name of this slave (e.g., 'chrome-release') from its
177 # configuration file if not provided as a param. 95 # configuration file if not provided as a param.
178 build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir) 96 build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
179 build_name = re.sub('[ .()]', '_', build_name) 97 build_name = re.sub('[ .()]', '_', build_name)
180 98
181 wc_dir = os.path.dirname(chrome_dir) 99 wc_dir = os.path.dirname(chrome_dir)
182 last_change = slave_utils.GetHashOrRevision(wc_dir) 100 last_change = slave_utils.GetHashOrRevision(wc_dir)
183 101
184 # TODO(dpranke): Is it safe to assume build_number is not blank? Should we 102 # TODO(dpranke): Is it safe to assume build_number is not blank? Should we
185 # assert() this ? 103 # assert() this ?
186 build_number = str(options.build_number) 104 build_number = str(options.build_number)
187 print 'last change: %s' % last_change 105 print 'last change: %s' % last_change
188 print 'build name: %s' % build_name 106 print 'build name: %s' % build_name
189 print 'build number: %s' % build_number 107 print 'build number: %s' % build_number
190 print 'host name: %s' % socket.gethostname() 108 print 'host name: %s' % socket.gethostname()
191 109
192 if options.gs_bucket: 110 # Create a file containing last_change revision. This file will be uploaded
193 # Create a file containing last_change revision. This file will be uploaded 111 # after all layout test results are uploaded so the client can check this
194 # after all layout test results are uploaded so the client can check this 112 # file to see if the upload for the revision is complete.
195 # file to see if the upload for the revision is complete. 113 # See crbug.com/574272 for more details.
196 # See crbug.com/574272 for more details. 114 last_change_file = os.path.join(staging_dir, 'LAST_CHANGE')
197 last_change_file = os.path.join(staging_dir, 'LAST_CHANGE') 115 with open(last_change_file, 'w') as f:
198 with open(last_change_file, 'w') as f: 116 f.write(last_change)
199 f.write(last_change)
200 117
201 # Copy the results to a directory archived by build number. 118 # Copy the results to a directory archived by build number.
202 gs_base = '/'.join([options.gs_bucket, build_name, build_number]) 119 gs_base = '/'.join([options.gs_bucket, build_name, build_number])
203 gs_acl = options.gs_acl 120 gs_acl = options.gs_acl
204 # These files never change, cache for a year. 121 # These files never change, cache for a year.
205 cache_control = "public, max-age=31556926" 122 cache_control = "public, max-age=31556926"
206 slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl, 123 slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
207 cache_control=cache_control) 124 cache_control=cache_control)
208 slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl, 125 slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
209 cache_control=cache_control) 126 cache_control=cache_control)
210 127
211 # TODO(dpranke): Remove these two lines once clients are fetching the 128 # TODO(dpranke): Remove these two lines once clients are fetching the
212 # files from the layout-test-results dir. 129 # files from the layout-test-results dir.
213 slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl, 130 slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl,
214 cache_control=cache_control) 131 cache_control=cache_control)
215 slave_utils.GSUtilCopyFile(failing_results_json, gs_base, gs_acl=gs_acl, 132 slave_utils.GSUtilCopyFile(failing_results_json, gs_base, gs_acl=gs_acl,
216 cache_control=cache_control) 133 cache_control=cache_control)
217 134
218 slave_utils.GSUtilCopyFile(last_change_file, 135 slave_utils.GSUtilCopyFile(last_change_file,
219 gs_base + '/' + results_dir_basename, gs_acl=gs_acl, 136 gs_base + '/' + results_dir_basename,
220 cache_control=cache_control) 137 gs_acl=gs_acl,
138 cache_control=cache_control)
221 139
222 # And also to the 'results' directory to provide the 'latest' results 140 # And also to the 'results' directory to provide the 'latest' results
223 # and make sure they are not cached at all (Cloud Storage defaults to 141 # and make sure they are not cached at all (Cloud Storage defaults to
224 # caching w/ a max-age=3600). 142 # caching w/ a max-age=3600).
225 gs_base = '/'.join([options.gs_bucket, build_name, 'results']) 143 gs_base = '/'.join([options.gs_bucket, build_name, 'results'])
226 cache_control = 'no-cache' 144 cache_control = 'no-cache'
227 slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl, 145 slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
228 cache_control=cache_control) 146 cache_control=cache_control)
229 slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl, 147 slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
230 cache_control=cache_control) 148 cache_control=cache_control)
231 149 slave_utils.GSUtilCopyFile(last_change_file,
232 slave_utils.GSUtilCopyFile(last_change_file, 150 gs_base + '/' + results_dir_basename,
233 gs_base + '/' + results_dir_basename, gs_acl=gs_acl, 151 gs_acl=gs_acl,
234 cache_control=cache_control) 152 cache_control=cache_control)
235
236 else:
237 # Where to save layout test results.
238 dest_parent_dir = os.path.join(archive_utils.Config.www_dir_base,
239 results_dir_basename.replace('-', '_'), build_name)
240 dest_dir = os.path.join(dest_parent_dir, last_change)
241
242 _MaybeMakeDirectoryOnArchiveHost(dest_dir)
243 _CopyFileToArchiveHost(zip_file, dest_dir)
244 _CopyFileToArchiveHost(full_results_json, dest_dir)
245 _CopyFileToArchiveHost(failing_results_json, dest_dir)
246 # Not supported on Google Storage yet.
247 _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir, diff_file_list,
248 options)
249 return 0 153 return 0
250 154
251 155
252 def _ParseOptions(): 156 def _ParseOptions():
253 option_parser = optparse.OptionParser() 157 option_parser = optparse.OptionParser()
254 option_parser.add_option('', '--build-dir', help='ignored') 158 option_parser.add_option('', '--build-dir', help='ignored')
255 option_parser.add_option('', '--results-dir', 159 option_parser.add_option('', '--results-dir',
256 help='path to layout test results, relative to ' 160 help='path to layout test results, relative to '
257 'the build_dir') 161 'the build_dir')
258 option_parser.add_option('', '--builder-name', 162 option_parser.add_option('', '--builder-name',
(...skipping 10 matching lines...) Expand all
269 'instead of the master.')) 173 'instead of the master.'))
270 option_parser.add_option('', '--gs-acl', 174 option_parser.add_option('', '--gs-acl',
271 default=None, 175 default=None,
272 help=('The ACL of the google storage files.')) 176 help=('The ACL of the google storage files.'))
273 option_parser.add_option('--staging-dir', 177 option_parser.add_option('--staging-dir',
274 help='Directory to use for staging the archives. ' 178 help='Directory to use for staging the archives. '
275 'Default behavior is to automatically detect ' 179 'Default behavior is to automatically detect '
276 'slave\'s build directory.') 180 'slave\'s build directory.')
277 chromium_utils.AddPropertiesOptions(option_parser) 181 chromium_utils.AddPropertiesOptions(option_parser)
278 options, _ = option_parser.parse_args() 182 options, _ = option_parser.parse_args()
183 if not options.gs_bucket:
184 option_parser.error('--gs-bucket is required.')
279 options.build_dir = build_directory.GetBuildOutputDirectory() 185 options.build_dir = build_directory.GetBuildOutputDirectory()
280 return options 186 return options
281 187
282 188
def main():
  """Parses options, configures logging, and runs the archive step."""
  options = _ParseOptions()
  log_format = ('%(asctime)s %(filename)s:%(lineno)-3d'
                ' %(levelname)s %(message)s')
  logging.basicConfig(level=logging.INFO,
                      format=log_format,
                      datefmt='%y%m%d %H:%M:%S')
  return archive_layout(options)


if '__main__' == __name__:
  sys.exit(main())
OLDNEW
« no previous file with comments | « no previous file | scripts/slave/chromium/archive_layout_test_results_unittest.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698