
Side by Side Diff: tools/verify_images_for_gm_results.py

Issue 18743006: Add script to verify that image files exist for every actual_result checksum (Closed) Base URL: http://skia.googlecode.com/svn/trunk/
Patch Set: Created 7 years, 5 months ago
#!/usr/bin/python

# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


""" Look through skia-autogen, searching for all checksums which should have
corresponding files in Google Storage, and verify that those files exist. """
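
# A rough usage sketch (assumed invocation; the script shells out to `svn` and
# `gsutil`, so both are expected to be on PATH):
#   python tools/verify_images_for_gm_results.py
# It exits non-zero if any referenced image file is missing from Google Storage.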


import json
import posixpath
import re
import subprocess
import sys


AUTOGEN_URL = 'http://skia-autogen.googlecode.com/svn/gm-actual'
GS_URL = 'gs://chromium-skia-gm/gm'
TEST_NAME_PATTERN = re.compile(r'(\S+)_(\S+)\.png')

def FileNameToGSURL(filename, hash_type, hash_value):
  """ Convert a file name given in a checksum file to the URL of the
  corresponding image file in Google Storage.

  filename: string; the file name to convert. Takes the form specified by
      TEST_NAME_PATTERN.
  hash_type: string; the type of the checksum.
  hash_value: string; the checksum itself.
  """
  match = TEST_NAME_PATTERN.match(filename)
  if not match:
    raise Exception('Invalid test name for file: %s' % filename)
  test_name = match.group(1)
  return '%s/%s/%s/%s.png' % (GS_URL, hash_type, test_name, hash_value)
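
# A rough sketch of the mapping (the test name, hash type, and checksum below
# are hypothetical):
#   FileNameToGSURL('sometest_565.png', 'bitmap-64bitMD5', '12345')
#   returns 'gs://chromium-skia-gm/gm/bitmap-64bitMD5/sometest/12345.png'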


def FindURLSInJSON(json_file, gs_urls):
  """ Extract Google Storage URLs from a JSON file in svn, adding them to the
  gs_urls dictionary.

  json_file: string; URL of the JSON file.
  gs_urls: dict; stores Google Storage URLs as keys and lists of the JSON files
      which reference them as values.

  Example gs_urls:
  { 'gs://chromium-skia-gm/gm/sometest/12345.png': [
      'http://skia-autogen.googlecode.com/svn/gm-actual/base-macmini/Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug/base-macmini/actual-results.json',
      'http://skia-autogen.googlecode.com/svn/gm-actual/base-macmini-10_8/Test-Mac10.8-MacMini4.1-GeForce320M-x86-Debug/base-macmini-10_8/actual-results.json',
    ]
  }
  """
  output = subprocess.check_output(['svn', 'cat', json_file])
  json_content = json.loads(output)
  for dict_type in ['actual-results']:
    for result_type in json_content[dict_type]:
      if json_content[dict_type][result_type]:
        for result in json_content[dict_type][result_type].keys():
          hash_type, hash_value = json_content[dict_type][result_type][result]
          gs_url = FileNameToGSURL(result, hash_type, str(hash_value))
          if gs_urls.get(gs_url):
            gs_urls[gs_url].append(json_file)
          else:
            gs_urls[gs_url] = [json_file]
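
# For reference, the nested loops above expect actual-results.json to have
# roughly this shape (the result category, file name, hash type, and checksum
# shown here are hypothetical):
#   { "actual-results": {
#       "failed": null,
#       "no-comparison": { "sometest_565.png": ["bitmap-64bitMD5", 12345] } } }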


def _FindJSONFiles(url, json_files):
  """ Helper function for FindJSONFiles. Recursively explore the repository,
  adding JSON files to a list.

  url: string; URL of the repository (or subdirectory thereof) to explore.
  json_files: list to which JSON file URLs will be added.
  """
  proc = subprocess.Popen(['svn', 'ls', url], stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
  output = proc.communicate()[0].splitlines()
  if proc.returncode != 0:
    raise Exception('Failed to list svn directory.')
  subdirs = []
  for item in output:
    if item.endswith(posixpath.sep):
      subdirs.append(item)
    elif item.endswith('.json'):
      json_files.append(posixpath.join(url, item))
    else:
      print 'Warning: ignoring %s' % posixpath.join(url, item)
  for subdir in subdirs:
    _FindJSONFiles(posixpath.join(url, subdir), json_files)
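
# Sketch of the traversal (directory and file names here are hypothetical): if
#   svn ls http://skia-autogen.googlecode.com/svn/gm-actual
# prints 'base-macmini/' and 'README', the function recurses into
# 'base-macmini/' (svn ls marks directories with a trailing '/'), warns about
# 'README', and collects the full URL of every '*.json' entry it encounters.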


def FindJSONFiles(url):
  """ Recursively explore the given repository and return a list of the JSON
  files it contains.

  url: string; URL of the repository to explore.
  """
  print 'Searching for JSON files in %s' % url
  json_files = []
  _FindJSONFiles(url, json_files)
  return json_files


def FindURLs(url):
  """ Find Google Storage URLs inside of JSON files in the given repository.
  Returns a dictionary whose keys are Google Storage URLs and values are lists
  of the JSON files which reference them.

  url: string; URL of the repository to explore.

  Example output:
  { 'gs://chromium-skia-gm/gm/sometest/12345.png': [
      'http://skia-autogen.googlecode.com/svn/gm-actual/base-macmini/Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug/base-macmini/actual-results.json',
      'http://skia-autogen.googlecode.com/svn/gm-actual/base-macmini-10_8/Test-Mac10.8-MacMini4.1-GeForce320M-x86-Debug/base-macmini-10_8/actual-results.json',
    ]
  }
  """
  gs_urls = {}
  for json_file in FindJSONFiles(url):
    print 'Looking for checksums in %s' % json_file
    FindURLSInJSON(json_file, gs_urls)
  return gs_urls


def VerifyURL(url):
  """ Verify that the given URL exists.

  url: string; the Google Storage URL of the image file in question.
  """
  proc = subprocess.Popen(['gsutil', 'ls', url], stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
  proc.communicate()
  return proc.returncode == 0
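
# Rough expectation (the URL below is hypothetical):
#   VerifyURL('gs://chromium-skia-gm/gm/bitmap-64bitMD5/sometest/12345.png')
# returns True only when `gsutil ls` exits with status 0, i.e. the object exists.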


def VerifyURLs(urls):
  """ Verify that each of the given URLs exists. Return a list of the URLs
  which do not exist.

  urls: dict; maps each Google Storage image URL to the list of JSON files
      which reference it.
  """
  print 'Verifying that images exist for URLs...'
  missing = []
  for url in urls.iterkeys():
    if not VerifyURL(url):
      print 'Missing: %s, referenced by: \n %s' % (url, '\n '.join(urls[url]))
      missing.append(url)
  return missing


def Main():
  urls = FindURLs(AUTOGEN_URL)
  missing = VerifyURLs(urls)
  if missing:
    print 'Found %d missing files.' % len(missing)
    return 1
  return 0


if __name__ == '__main__':
  sys.exit(Main())