Chromium Code Reviews

Unified Diff: tools/rebaseline.py

Issue 18348018: rebaseline.py: if expectations dir contains JSON format results, update those instead of image files (Closed) Base URL: http://skia.googlecode.com/svn/trunk/
Patch Set: Created 7 years, 6 months ago
Index: tools/rebaseline.py
===================================================================
--- tools/rebaseline.py (revision 9897)
+++ tools/rebaseline.py (working copy)
@@ -14,6 +14,7 @@
'''
# System-level imports
+# EPOGER: check if any of these are no longer needed
import argparse
import os
import re
@@ -41,6 +42,7 @@
sys.path.append(GM_DIRECTORY)
import gm_json
+# EPOGER: make this a command-line argument
JSON_EXPECTATIONS_FILENAME='expected-results.json'
# Mapping of gm-expectations subdir (under
@@ -78,91 +80,40 @@
pass
# Object that rebaselines a JSON expectations file (not individual image files).
-#
-# TODO(epoger): Most of this is just the code from the old ImageRebaseliner...
-# some of it will need to be updated in order to properly rebaseline JSON files.
-# There is a lot of code duplicated between here and ImageRebaseliner, but
-# that's fine because we will delete ImageRebaseliner soon.
class JsonRebaseliner(object):
# params:
- # expectations_root: root directory of all expectations
- # json_base_url: base URL from which to read json_filename
- # json_filename: filename (under json_base_url) from which to read a
- # summary of results; typically "actual-results.json"
+ # expectations_root: root directory of all expectations JSON files
+ # actuals_base_url: base URL from which to read actual-result JSON files
+ # actuals_filename: filename (under actuals_base_url) from which to read a
+ # summary of results; typically "actual-results.json"
# tests: list of tests to rebaseline, or None if we should rebaseline
# whatever files the JSON results summary file tells us to
# configs: which configs to run for each test; this should only be
# specified if the list of tests was also specified (otherwise,
# the JSON file will give us test names and configs)
- # dry_run: if True, instead of actually downloading files or adding
- # files to checkout, display a list of operations that
- # we would normally perform
# add_new: if True, add expectations for tests which don't have any yet
- # missing_json_is_fatal: whether to halt execution if we cannot read a
- # JSON actual result summary file
- def __init__(self, expectations_root, json_base_url, json_filename,
- tests=None, configs=None, dry_run=False,
- add_new=False, missing_json_is_fatal=False):
- raise ValueError('JsonRebaseliner not yet implemented') # TODO(epoger)
+ def __init__(self, expectations_root, actuals_base_url, actuals_filename,
+ tests=None, configs=None, add_new=False):
if configs and not tests:
raise ValueError('configs should only be specified if tests ' +
'were specified also')
self._expectations_root = expectations_root
self._tests = tests
self._configs = configs
- self._json_base_url = json_base_url
- self._json_filename = json_filename
- self._dry_run = dry_run
+ self._actuals_base_url = actuals_base_url
+ self._actuals_filename = actuals_filename
self._add_new = add_new
- self._missing_json_is_fatal = missing_json_is_fatal
- self._googlestorage_gm_actuals_root = (
- 'http://chromium-skia-gm.commondatastorage.googleapis.com/gm')
self._testname_pattern = re.compile('(\S+)_(\S+).png')
- self._is_svn_checkout = (
- os.path.exists('.svn') or
- os.path.exists(os.path.join(os.pardir, '.svn')))
- self._is_git_checkout = (
- os.path.exists('.git') or
- os.path.exists(os.path.join(os.pardir, '.git')))
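
For illustration, a minimal sketch of how the reworked constructor might be driven. The argument values below are hypothetical; in this patch the real values come from the argparse flags near the bottom of the file, and the example subdir/builder names are the ones used in the RebaselineSubdir comments:

    # Hypothetical standalone use of the new constructor signature.
    rebaseliner = JsonRebaseliner(
        expectations_root='.',
        actuals_base_url='http://skia-autogen.googlecode.com/svn/gm-actual',
        actuals_filename='actual-results.json',
        tests=None,    # None: rebaseline whatever the summary file reports
        configs=None,  # may only be set when tests is also set
        add_new=False)
    rebaseliner.RebaselineSubdir(
        subdir='base-shuttle-win7-intel-float',
        builder='Test-Win7-ShuttleA-HD2000-x86-Release')
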
- # If dry_run is False, execute subprocess.call(cmd).
- # If dry_run is True, print the command we would have otherwise run.
+ # EPOGER: check if this (or any other method) is no longer needed.
+ # Execute subprocess.call(cmd).
# Raises a CommandFailedException if the command fails.
def _Call(self, cmd):
- if self._dry_run:
- print '%s' % ' '.join(cmd)
- return
if subprocess.call(cmd) != 0:
raise CommandFailedException('error running command: ' +
' '.join(cmd))
- # Download a single actual result from GoogleStorage, returning True if it
- # succeeded.
- def _DownloadFromGoogleStorage(self, infilename, outfilename, all_results):
- test_name = self._testname_pattern.match(infilename).group(1)
- if not test_name:
- print '# unable to find test_name for infilename %s' % infilename
- return False
- try:
- hash_type, hash_value = all_results[infilename]
- except KeyError:
- print ('# unable to find filename %s in all_results dict' %
- infilename)
- return False
- except ValueError as e:
- print '# ValueError reading filename %s from all_results dict: %s'%(
- infilename, e)
- return False
- url = '%s/%s/%s/%s.png' % (self._googlestorage_gm_actuals_root,
- hash_type, test_name, hash_value)
- try:
- self._DownloadFile(source_url=url, dest_filename=outfilename)
- return True
- except CommandFailedException:
- print '# Couldn\'t fetch gs_url %s' % url
- return False
-
# Download a single actual result from skia-autogen, returning True if it
# succeeded.
def _DownloadFromAutogen(self, infilename, outfilename,
@@ -190,18 +141,14 @@
'--output', temp_filename ])
self._Call([ 'mv', temp_filename, dest_filename ])
- # Returns the full contents of a URL, as a single string.
- #
- # Unlike standard URL handling, we allow relative "file:" URLs;
- # for example, "file:one/two" resolves to the file ./one/two
- # (relative to current working dir)
- def _GetContentsOfUrl(self, url):
- file_prefix = 'file:'
- if url.startswith(file_prefix):
- filename = url[len(file_prefix):]
- return open(filename, 'r').read()
+ # Returns the full contents of filepath, as a single string.
+ # If filepath looks like a URL, try to read it that way instead of as
+ # a path on local storage.
+ def _GetFileContents(self, filepath):
+ if filepath.startswith('http:') or filepath.startswith('https:'):
+ return urllib2.urlopen(filepath).read()
else:
- return urllib2.urlopen(url).read()
+ return open(filepath, 'r').read()
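
As a sketch of the new helper's two code paths (assuming an already-constructed rebaseliner; the local path below is hypothetical, and JSON_EXPECTATIONS_FILENAME is the module-level constant defined earlier in this file):

    # HTTP(S) locations are fetched with urllib2...
    summary = rebaseliner._GetFileContents(
        'http://skia-autogen.googlecode.com/svn/gm-actual/'
        'base-shuttle-win7-intel-float/Test-Win7-ShuttleA-HD2000-x86-Release/'
        'base-shuttle-win7-intel-float/actual-results.json')
    # ...anything else is treated as a path on local storage.
    expectations = rebaseliner._GetFileContents(
        os.path.join('.', 'base-shuttle-win7-intel-float',
                     JSON_EXPECTATIONS_FILENAME))

Note that, unlike the removed _GetContentsOfUrl, relative 'file:' URLs are no longer special-cased; callers now pass plain filesystem paths.
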
# Returns a dictionary of actual results from actual-results.json file.
#
@@ -212,10 +159,8 @@
# u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
# }
#
- # If the JSON actual result summary file cannot be loaded, the behavior
- # depends on self._missing_json_is_fatal:
- # - if true: execution will halt with an exception
- # - if false: we will log an error message but return an empty dictionary
+ # If the JSON actual result summary file cannot be loaded, raise an
+ # exception.
#
# params:
# json_url: URL pointing to a JSON actual result summary file
@@ -224,16 +169,7 @@
# gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON] ;
# if None, then include ALL sections.
def _GetActualResults(self, json_url, sections=None):
- try:
- json_contents = self._GetContentsOfUrl(json_url)
- except (urllib2.HTTPError, IOError):
- message = 'unable to load JSON summary URL %s' % json_url
- if self._missing_json_is_fatal:
- raise ValueError(message)
- else:
- print '# %s' % message
- return {}
-
+ json_contents = self._GetFileContents(json_url)
json_dict = gm_json.LoadFromString(json_contents)
results_to_return = {}
actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
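
Given the dictionary shape shown in the comment above, a caller could split each entry back into test name, config, and hash using the same pattern the constructor compiles into self._testname_pattern (a sketch only; this is not how rebaseline.py itself consumes the dictionary):

    import re

    testname_pattern = re.compile('(\S+)_(\S+).png')
    actual_results = {
        u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716],
    }
    for filename, (hash_type, hash_value) in actual_results.iteritems():
        test, config = testname_pattern.match(filename).groups()
        print '%s (config %s): %s %s' % (test, config, hash_type, hash_value)
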
@@ -261,9 +197,6 @@
# - JSONKEY_ACTUALRESULTS_FAILED
#
def _GetFilesToRebaseline(self, json_url, add_new):
- if self._dry_run:
- print ''
- print '#'
print ('# Getting files to rebaseline from JSON summary URL %s ...'
% json_url)
sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
@@ -274,15 +207,11 @@
files_to_rebaseline = results_to_rebaseline.keys()
files_to_rebaseline.sort()
print '# ... found files_to_rebaseline %s' % files_to_rebaseline
- if self._dry_run:
- print '#'
return files_to_rebaseline
# Rebaseline a single file.
def _RebaselineOneFile(self, expectations_subdir, builder_name,
infilename, outfilename, all_results):
- if self._dry_run:
- print ''
print '# ' + infilename
# First try to download this result image from Google Storage.
@@ -308,18 +237,6 @@
print '# Couldn\'t fetch infilename ' + infilename
return
- # Add this file to version control (if appropriate).
- if self._add_new:
- if self._is_svn_checkout:
- cmd = [ 'svn', 'add', '--quiet', outfilename ]
- self._Call(cmd)
- cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type',
- 'image/png', outfilename ];
- self._Call(cmd)
- elif self._is_git_checkout:
- cmd = [ 'git', 'add', outfilename ]
- self._Call(cmd)
-
# Rebaseline the given configs for a single test.
#
# params:
@@ -337,8 +254,6 @@
else:
configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
'msaa4' ]
- if self._dry_run:
- print ''
print '# ' + expectations_subdir + ':'
for config in configs:
infilename = test + '_' + config + '.png'
@@ -356,11 +271,39 @@
# subdir : e.g. 'base-shuttle-win7-intel-float'
# builder : e.g. 'Test-Win7-ShuttleA-HD2000-x86-Release'
def RebaselineSubdir(self, subdir, builder):
- json_url = '/'.join([self._json_base_url,
- subdir, builder, subdir,
- self._json_filename])
- all_results = self._GetActualResults(json_url=json_url)
+ # Read in the actual result summary, and extract all the tests whose
+ # results we need to update.
+ actuals_url = '/'.join([self._actuals_base_url,
+ subdir, builder, subdir,
+ self._actuals_filename])
+ sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
+ if self._add_new:
+ sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
+ results_to_update = self._GetActualResults(json_url=actuals_url,
+ sections=sections)
+ #print 'EPOGER: results_to_update is...\n%s\n\n' % results_to_update
+ # EPOGER implement this section...
+ # If tests or configs were set, throw out any new expectations that
+ # don't match.
+
+ # Read in current expectations.
+ expectations_json_filepath = os.path.join(
+ self._expectations_root, subdir, JSON_EXPECTATIONS_FILENAME)
+ expectations_dict = gm_json.LoadFromFile(expectations_json_filepath)
+ #print 'EPOGER: expectations_dict is...\n%s\n\n' % expectations_dict
+
+ # EPOGER implement this section...
+ # Update the expectations in memory.
+
+ # Write out updated expectations.
+ gm_json.WriteToFile(expectations_dict, expectations_json_filepath)
+
+ # EPOGER: delete
+ def OLDRebaselineSubdir(self, subdir, builder):
+ results_to_rebaseline = self._GetActualResults(json_url=json_url,
+ sections=sections)
+
if self._tests:
for test in self._tests:
self._RebaselineOneTest(expectations_subdir=subdir,
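
The "update the expectations in memory" step in the new RebaselineSubdir is still a TODO; one possible shape for it is sketched below. The gm_json key names used here (JSONKEY_EXPECTEDRESULTS and JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS) are assumptions about the expectations-file schema, not something this patch defines:

    # Hypothetical sketch: fold every actual result into the expectations dict.
    for image_name, (hash_type, hash_value) in results_to_update.iteritems():
        expected = expectations_dict[gm_json.JSONKEY_EXPECTEDRESULTS]
        entry = expected.setdefault(image_name, {})
        # Record the new digest as the single allowed expectation.
        entry[gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS] = [
            [hash_type, hash_value]]
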
@@ -399,13 +342,13 @@
'contain one or more base-* subdirectories. Defaults to ' +
'%(default)s',
default='.')
-parser.add_argument('--json-base-url',
- help='base URL from which to read JSON_FILENAME ' +
+parser.add_argument('--actuals-base-url',
+ help='base URL from which to read ACTUALS_FILENAME ' +
'files; defaults to %(default)s',
default='http://skia-autogen.googlecode.com/svn/gm-actual')
-parser.add_argument('--json-filename',
- help='filename (under JSON_BASE_URL) to read a summary ' +
- 'of results from; defaults to %(default)s',
+parser.add_argument('--actuals-filename',
+ help='filename (under ACTUALS_BASE_URL) to read a ' +
+ 'summary of results from; defaults to %(default)s',
default='actual-results.json')
parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
help='which platform subdirectories to rebaseline; ' +
@@ -440,23 +383,19 @@
expectations_json_file = os.path.join(args.expectations_root, subdir,
JSON_EXPECTATIONS_FILENAME)
if os.path.isfile(expectations_json_file):
- sys.stderr.write('ERROR: JsonRebaseliner is not implemented yet.\n')
- sys.exit(1)
rebaseliner = JsonRebaseliner(
expectations_root=args.expectations_root,
tests=args.tests, configs=args.configs,
- dry_run=args.dry_run,
- json_base_url=args.json_base_url,
- json_filename=args.json_filename,
- add_new=args.add_new,
- missing_json_is_fatal=missing_json_is_fatal)
+ actuals_base_url=args.actuals_base_url,
+ actuals_filename=args.actuals_filename,
+ add_new=args.add_new)
else:
rebaseliner = rebaseline_imagefiles.ImageRebaseliner(
expectations_root=args.expectations_root,
tests=args.tests, configs=args.configs,
dry_run=args.dry_run,
- json_base_url=args.json_base_url,
- json_filename=args.json_filename,
+ json_base_url=args.actuals_base_url,
+ json_filename=args.actuals_filename,
add_new=args.add_new,
missing_json_is_fatal=missing_json_is_fatal)
rebaseliner.RebaselineSubdir(subdir=subdir, builder=builder)
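
Taken together with the flag renames above, a hypothetical invocation of the updated script might look like the following. Only --actuals-base-url, --actuals-filename, and --subdirs appear verbatim in this patch; the other flag names are inferred from the args.* attributes the script reads:

    python tools/rebaseline.py \
        --expectations-root . \
        --actuals-base-url http://skia-autogen.googlecode.com/svn/gm-actual \
        --actuals-filename actual-results.json \
        --subdirs base-shuttle-win7-intel-float \
        --add-new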
