Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(511)

Side by Side Diff: tools/svndiff.py

Issue 19444003: svndiff.py: fix --add-new for JSON-format expectations (Closed) Base URL: http://skia.googlecode.com/svn/trunk/
Patch Set: line_wraps Created 7 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/python 1 #!/usr/bin/python
2 ''' 2 '''
3 Copyright 2012 Google Inc. 3 Copyright 2012 Google Inc.
4 4
5 Use of this source code is governed by a BSD-style license that can be 5 Use of this source code is governed by a BSD-style license that can be
6 found in the LICENSE file. 6 found in the LICENSE file.
7 ''' 7 '''
8 8
9 ''' 9 '''
10 Generates a visual diff of all pending changes in the local SVN checkout. 10 Generates a visual diff of all pending changes in the local SVN checkout.
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
89 for try_path in possible_paths: 89 for try_path in possible_paths:
90 if os.path.isfile(try_path): 90 if os.path.isfile(try_path):
91 return try_path 91 return try_path
92 raise Exception('cannot find skdiff in paths %s; maybe you need to ' 92 raise Exception('cannot find skdiff in paths %s; maybe you need to '
93 'specify the %s option or build skdiff?' % ( 93 'specify the %s option or build skdiff?' % (
94 possible_paths, OPTION_PATH_TO_SKDIFF)) 94 possible_paths, OPTION_PATH_TO_SKDIFF))
95 95
def _DownloadUrlToFile(source_url, dest_path):
    """Download source_url, and save its contents to dest_path.

    source_url: URL to read the image bytes from
    dest_path: local filesystem path to write the bytes to

    Raises an Exception (wrapping the underlying error) if there were any
    problems."""
    try:
        reader = urllib2.urlopen(source_url)
        try:
            writer = open(dest_path, 'wb')
            try:
                writer.write(reader.read())
            finally:
                # Close the output file even if read()/write() raised;
                # previously the handle leaked on any mid-transfer failure.
                writer.close()
        finally:
            reader.close()
    except Exception as e:
        # NOTE: was `except BaseException`, which also caught
        # KeyboardInterrupt/SystemExit and converted them into a plain
        # Exception; Exception is the widest class we should wrap here.
        raise Exception(
            '%s: unable to download source_url %s to dest_path %s' % (
                e, source_url, dest_path))
103 108
def _CreateGSUrl(imagename, hash_type, hash_digest):
    """Build the HTTP URL from which this exact version of an
    actually-generated GM image can be downloaded.

    imagename: filename of the test image, e.g. 'perlinnoise_msaa4.png'
    hash_type: which hash algorithm produced hash_digest, e.g.
               gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5
    hash_digest: hash digest identifying the image version to fetch
    """
    test_name = IMAGE_FILENAME_RE.match(imagename).group(1)
    return gm_json.CreateGmActualUrl(
        test_name=test_name,
        hash_type=hash_type,
        hash_digest=hash_digest)
117 122
def _DownloadImageIfChecksummed(imagename, hash_digest, flattened_dir,
                                filename_prefix):
    """Download one GM image into flattened_dir, if hash_digest is set.

    imagename: name of the test image, e.g. 'perlinnoise_msaa4.png'
    hash_digest: hash digest of the image version to fetch; if falsy
                 (the image has no recorded checksum on this side of the
                 diff), do nothing
    flattened_dir: directory to write the downloaded image into
    filename_prefix: prefix to prepend to the written filename
    """
    if not hash_digest:
        return
    # TODO(epoger): Currently, this assumes that all images have been
    # checksummed using gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5
    image_url = _CreateGSUrl(
        imagename=imagename,
        hash_type=gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5,
        hash_digest=hash_digest)
    _DownloadUrlToFile(
        source_url=image_url,
        dest_path=os.path.join(flattened_dir, filename_prefix + imagename))

def _CallJsonDiff(old_json_path, new_json_path,
                  old_flattened_dir, new_flattened_dir,
                  filename_prefix):
    """Using jsondiff.py, write the images that differ between two GM
    expectations summary files (old and new) into old_flattened_dir and
    new_flattened_dir.

    old_json_path: path to the old JSON expectations summary file
    new_json_path: path to the new JSON expectations summary file
    old_flattened_dir: directory to write "before" images into
    new_flattened_dir: directory to write "after" images into
    filename_prefix: prefix to prepend to filenames of all images we write
        into the flattened directories
    """
    json_differ = jsondiff.GMDiffer()
    diff_dict = json_differ.GenerateDiffDict(oldfile=old_json_path,
                                             newfile=new_json_path)
    print('Downloading %d before-and-after image pairs...' % len(diff_dict))
    for (imagename, results) in diff_dict.items():
        # The old/new download paths were verbatim duplicates; the shared
        # helper handles the "no checksum on this side" case for both.
        _DownloadImageIfChecksummed(imagename, results['old'],
                                    old_flattened_dir, filename_prefix)
        _DownloadImageIfChecksummed(imagename, results['new'],
                                    new_flattened_dir, filename_prefix)
152 162
153 def SvnDiff(path_to_skdiff, dest_dir, source_dir): 163 def SvnDiff(path_to_skdiff, dest_dir, source_dir):
154 """Generates a visual diff of all pending changes in source_dir. 164 """Generates a visual diff of all pending changes in source_dir.
155 165
156 @param path_to_skdiff 166 @param path_to_skdiff
157 @param dest_dir existing directory within which to write results 167 @param dest_dir existing directory within which to write results
158 @param source_dir 168 @param source_dir
159 """ 169 """
160 # Validate parameters, filling in default values if necessary and possible. 170 # Validate parameters, filling in default values if necessary and possible.
161 path_to_skdiff = os.path.abspath(FindPathToSkDiff(path_to_skdiff)) 171 path_to_skdiff = os.path.abspath(FindPathToSkDiff(path_to_skdiff))
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
195 new_json_path=modified_file_path, 205 new_json_path=modified_file_path,
196 old_flattened_dir=original_flattened_dir, 206 old_flattened_dir=original_flattened_dir,
197 new_flattened_dir=modified_flattened_dir, 207 new_flattened_dir=modified_flattened_dir,
198 filename_prefix=platform_prefix) 208 filename_prefix=platform_prefix)
199 os.remove(original_file_path) 209 os.remove(original_file_path)
200 else: 210 else:
201 dest_filename = re.sub(os.sep, '__', modified_file_path) 211 dest_filename = re.sub(os.sep, '__', modified_file_path)
202 # If the file had STATUS_DELETED, it won't exist anymore... 212 # If the file had STATUS_DELETED, it won't exist anymore...
203 if os.path.isfile(modified_file_path): 213 if os.path.isfile(modified_file_path):
204 shutil.copyfile(modified_file_path, 214 shutil.copyfile(modified_file_path,
205 os.path.join(modified_flattened_dir, dest_filena me)) 215 os.path.join(modified_flattened_dir,
216 dest_filename))
206 svn_repo.ExportBaseVersionOfFile( 217 svn_repo.ExportBaseVersionOfFile(
207 modified_file_path, 218 modified_file_path,
208 os.path.join(original_flattened_dir, dest_filename)) 219 os.path.join(original_flattened_dir, dest_filename))
209 220
210 # Run skdiff: compare original_flattened_dir against modified_flattened_dir 221 # Run skdiff: compare original_flattened_dir against modified_flattened_dir
211 RunCommand('%s %s %s %s' % (path_to_skdiff, original_flattened_dir, 222 RunCommand('%s %s %s %s' % (path_to_skdiff, original_flattened_dir,
212 modified_flattened_dir, diff_dir)) 223 modified_flattened_dir, diff_dir))
213 print '\nskdiff results are ready in file://%s/index.html' % diff_dir 224 print '\nskdiff results are ready in file://%s/index.html' % diff_dir
214 225
215 def RaiseUsageException(): 226 def RaiseUsageException():
(...skipping 20 matching lines...) Expand all
236 action='store', type='string', default=None, 247 action='store', type='string', default=None,
237 help='path to already-built skdiff tool; if not set, ' 248 help='path to already-built skdiff tool; if not set, '
238 'will search for it in typical directories near this ' 249 'will search for it in typical directories near this '
239 'script') 250 'script')
240 parser.add_option(OPTION_SOURCE_DIR, 251 parser.add_option(OPTION_SOURCE_DIR,
241 action='store', type='string', default='.', 252 action='store', type='string', default='.',
242 help='root directory within which to compare all ' + 253 help='root directory within which to compare all ' +
243 'files; defaults to "%default"') 254 'files; defaults to "%default"')
244 (options, args) = parser.parse_args() 255 (options, args) = parser.parse_args()
245 Main(options, args) 256 Main(options, args)
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698