OLD | NEW |
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 | 2 |
3 """ | 3 """ |
4 Copyright 2014 Google Inc. | 4 Copyright 2014 Google Inc. |
5 | 5 |
6 Use of this source code is governed by a BSD-style license that can be | 6 Use of this source code is governed by a BSD-style license that can be |
7 found in the LICENSE file. | 7 found in the LICENSE file. |
8 | 8 |
9 Compare results of two render_pictures runs. | 9 Compare results of two render_pictures runs. |
10 | 10 |
11 TODO(epoger): Start using this module to compare ALL images (whether they | 11 TODO(epoger): Start using this module to compare ALL images (whether they |
12 were generated from GMs or SKPs), and rename it accordingly. | 12 were generated from GMs or SKPs), and rename it accordingly. |
13 """ | 13 """ |
14 | 14 |
15 # System-level imports | 15 # System-level imports |
16 import logging | 16 import logging |
17 import os | 17 import os |
18 import shutil | 18 import shutil |
| 19 import subprocess |
19 import tempfile | 20 import tempfile |
20 import time | 21 import time |
21 | 22 |
22 # Must fix up PYTHONPATH before importing from within Skia | 23 # Must fix up PYTHONPATH before importing from within Skia |
23 import fix_pythonpath # pylint: disable=W0611 | 24 import fix_pythonpath # pylint: disable=W0611 |
24 | 25 |
25 # Imports from within Skia | 26 # Imports from within Skia |
| 27 from py.utils import git_utils |
26 from py.utils import gs_utils | 28 from py.utils import gs_utils |
27 from py.utils import url_utils | 29 from py.utils import url_utils |
28 import buildbot_globals | 30 import buildbot_globals |
29 import column | 31 import column |
30 import gm_json | 32 import gm_json |
31 import imagediffdb | 33 import imagediffdb |
32 import imagepair | 34 import imagepair |
33 import imagepairset | 35 import imagepairset |
34 import results | 36 import results |
35 | 37 |
(...skipping 105 matching lines...)
141 self._setA_label = setA_label or setA_section | 143 self._setA_label = setA_label or setA_section |
142 self._setB_label = setB_label or setB_section | 144 self._setB_label = setB_label or setB_section |
143 else: | 145 else: |
144 self._setA_label = setA_label or 'setA' | 146 self._setA_label = setA_label or 'setA' |
145 self._setB_label = setB_label or 'setB' | 147 self._setB_label = setB_label or 'setB' |
146 | 148 |
147 tempdir = tempfile.mkdtemp() | 149 tempdir = tempfile.mkdtemp() |
148 try: | 150 try: |
149 setA_root = os.path.join(tempdir, 'setA') | 151 setA_root = os.path.join(tempdir, 'setA') |
150 setB_root = os.path.join(tempdir, 'setB') | 152 setB_root = os.path.join(tempdir, 'setB') |
| 153 setA_repo_revision = None |
| 154 setB_repo_revision = None |
151 for source_dir in setA_dirs: | 155 for source_dir in setA_dirs: |
152 self._copy_dir_contents(source_dir=source_dir, dest_dir=setA_root) | 156 self._copy_dir_contents(source_dir=source_dir, dest_dir=setA_root) |
| 157 # TODO(stephana): There is a potential race condition here... we copy |
| 158 # the contents out of the source_dir, and THEN we get the commit hash |
| 159 # of source_dir. If source_dir points at a git checkout, and that |
| 160 # checkout is updated (by a different thread/process) during this |
| 161 # operation, then the contents and commit hash will be out of sync. |
| 162 setA_repo_revision = self._get_repo_revision( |
| 163 source_dir=source_dir, assert_if_not=setA_repo_revision) |
153 for source_dir in setB_dirs: | 164 for source_dir in setB_dirs: |
154 self._copy_dir_contents(source_dir=source_dir, dest_dir=setB_root) | 165 self._copy_dir_contents(source_dir=source_dir, dest_dir=setB_root) |
| 166 setB_repo_revision = self._get_repo_revision( |
| 167 source_dir=source_dir, assert_if_not=setB_repo_revision) |
| 168 |
| 169 self._setA_descriptions = { |
| 170 results.KEY__SET_DESCRIPTIONS__DIR: setA_dirs, |
| 171 results.KEY__SET_DESCRIPTIONS__REPO_REVISION: setA_repo_revision, |
| 172 results.KEY__SET_DESCRIPTIONS__SECTION: setA_section, |
| 173 } |
| 174 self._setB_descriptions = { |
| 175 results.KEY__SET_DESCRIPTIONS__DIR: setB_dirs, |
| 176 results.KEY__SET_DESCRIPTIONS__REPO_REVISION: setB_repo_revision, |
| 177 results.KEY__SET_DESCRIPTIONS__SECTION: setB_section, |
| 178 } |
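[Reviewer note] The TODO above flags a copy-then-hash race: the checkout could be updated between _copy_dir_contents() and _get_repo_revision(). A minimal sketch of one way to detect that race (hypothetical helper, not part of this CL; it assumes source_dir is a local git checkout):

  import shutil
  import subprocess

  def copy_checkout_and_get_revision(source_dir, dest_dir):
    # Hypothetical sketch: hash the checkout before and after copying.
    def head_revision(checkout_dir):
      return subprocess.check_output(
          args=['git', 'rev-parse', 'HEAD'], cwd=checkout_dir).strip()
    revision_before = head_revision(source_dir)
    shutil.copytree(source_dir, dest_dir)
    # If another process updated the checkout during the copy, HEAD will
    # have moved and the copied contents may not match revision_before.
    if head_revision(source_dir) != revision_before:
      raise Exception('checkout %s changed during copy' % source_dir)
    return revision_before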
155 | 179 |
156 time_start = int(time.time()) | 180 time_start = int(time.time()) |
157 self._results = self._load_result_pairs( | 181 self._results = self._load_result_pairs( |
158 setA_root=setA_root, setA_section=setA_section, | 182 setA_root=setA_root, setB_root=setB_root, |
159 setB_root=setB_root, setB_section=setB_section) | 183 setA_section=setA_section, setB_section=setB_section) |
160 if self._results: | 184 if self._results: |
161 self._timestamp = int(time.time()) | 185 self._timestamp = int(time.time()) |
162 logging.info('Number of download file collisions: %s' % | 186 logging.info('Number of download file collisions: %s' % |
163 imagediffdb.global_file_collisions) | 187 imagediffdb.global_file_collisions) |
164 logging.info('Results complete; took %d seconds.' % | 188 logging.info('Results complete; took %d seconds.' % |
165 (self._timestamp - time_start)) | 189 (self._timestamp - time_start)) |
166 finally: | 190 finally: |
167 shutil.rmtree(tempdir) | 191 shutil.rmtree(tempdir) |
168 | 192 |
169 def _load_result_pairs(self, setA_root, setA_section, setB_root, | 193 def _load_result_pairs(self, setA_root, setB_root, |
170 setB_section): | 194 setA_section, setB_section): |
171 """Loads all JSON image summaries from 2 directory trees and compares them. | 195 """Loads all JSON image summaries from 2 directory trees and compares them. |
172 | 196 |
| 197 TODO(stephana): This method is only called from within __init__(); it might |
| 198 make more sense to just roll the content of this method into __init__(). |
| 199 |
173 Args: | 200 Args: |
174 setA_root: root directory containing JSON summaries of rendering results | 201 setA_root: root directory containing JSON summaries of rendering results |
| 202 setB_root: root directory containing JSON summaries of rendering results |
175 setA_section: which section (gm_json.JSONKEY_ACTUALRESULTS or | 203 setA_section: which section (gm_json.JSONKEY_ACTUALRESULTS or |
176 gm_json.JSONKEY_EXPECTEDRESULTS) to load from the summaries in setA | 204 gm_json.JSONKEY_EXPECTEDRESULTS) to load from the summaries in setA |
177 setB_root: root directory containing JSON summaries of rendering results | |
178 setB_section: which section (gm_json.JSONKEY_ACTUALRESULTS or | 205 setB_section: which section (gm_json.JSONKEY_ACTUALRESULTS or |
179 gm_json.JSONKEY_EXPECTEDRESULTS) to load from the summaries in setB | 206 gm_json.JSONKEY_EXPECTEDRESULTS) to load from the summaries in setB |
180 | 207 |
181 Returns the summary of all image diff results (or None, depending on | 208 Returns the summary of all image diff results (or None, depending on |
182 self._prefetch_only). | 209 self._prefetch_only). |
183 """ | 210 """ |
184 logging.info('Reading JSON image summaries from dirs %s and %s...' % ( | 211 logging.info('Reading JSON image summaries from dirs %s and %s...' % ( |
185 setA_root, setB_root)) | 212 setA_root, setB_root)) |
186 setA_dicts = self._read_dicts_from_root(setA_root) | 213 setA_dicts = self._read_dicts_from_root(setA_root) |
187 setB_dicts = self._read_dicts_from_root(setB_root) | 214 setB_dicts = self._read_dicts_from_root(setB_root) |
(...skipping 205 matching lines...)
393 """ | 420 """ |
394 if gs_utils.GSUtils.is_gs_url(source_dir): | 421 if gs_utils.GSUtils.is_gs_url(source_dir): |
395 (bucket, path) = gs_utils.GSUtils.split_gs_url(source_dir) | 422 (bucket, path) = gs_utils.GSUtils.split_gs_url(source_dir) |
396 self._gs.download_dir_contents(source_bucket=bucket, source_dir=path, | 423 self._gs.download_dir_contents(source_bucket=bucket, source_dir=path, |
397 dest_dir=dest_dir) | 424 dest_dir=dest_dir) |
398 elif source_dir.lower().startswith(REPO_URL_PREFIX): | 425 elif source_dir.lower().startswith(REPO_URL_PREFIX): |
399 repo_dir = os.path.join(REPO_BASEPATH, source_dir[len(REPO_URL_PREFIX):]) | 426 repo_dir = os.path.join(REPO_BASEPATH, source_dir[len(REPO_URL_PREFIX):]) |
400 shutil.copytree(repo_dir, dest_dir) | 427 shutil.copytree(repo_dir, dest_dir) |
401 else: | 428 else: |
402 shutil.copytree(source_dir, dest_dir) | 429 shutil.copytree(source_dir, dest_dir) |
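[Reviewer note] _copy_dir_contents() dispatches on the form of source_dir. An illustrative sketch of the three accepted forms (paths and dest dirs are hypothetical; note that shutil.copytree() requires dest_dir not to exist yet):

  # Google Storage URL: fetched with gs_utils download_dir_contents().
  self._copy_dir_contents(source_dir='gs://some-bucket/render-results',
                          dest_dir=os.path.join(tempdir, 'setA'))
  # "repo:" URL: resolved to a path under REPO_BASEPATH in the local
  # Skia checkout, then copied with shutil.copytree().
  self._copy_dir_contents(source_dir='repo:gm/tests/inputs',
                          dest_dir=os.path.join(tempdir, 'setB'))
  # Anything else: treated as a local filepath and copied directly.
  self._copy_dir_contents(source_dir='/tmp/render-results',
                          dest_dir=os.path.join(tempdir, 'setC'))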
| 430 |
| 431 def _get_repo_revision(self, source_dir, assert_if_not=None): |
| 432 """Get the commit hash of source_dir, IF it refers to a git checkout. |
| 433 |
| 434 Args: |
| 435 source_dir: path to source dir (GS URL, local filepath, or a special |
| 436 "repo:" URL type that points at a file within our Skia checkout; |
| 437 only the "repo:" URL type will have a commit hash). |
| 438 assert_if_not: if not None, raise an Exception if source_dir has a |
| 439 commit hash and that hash is not equal to this |
| 440 """ |
| 441 if source_dir.lower().startswith(REPO_URL_PREFIX): |
| 442 repo_dir = os.path.join(REPO_BASEPATH, source_dir[len(REPO_URL_PREFIX):]) |
| 443 revision = subprocess.check_output( |
| 444 args=[git_utils.GIT, 'rev-parse', 'HEAD'], cwd=repo_dir).strip() |
| 445 if assert_if_not and revision != assert_if_not: |
| 446 raise Exception('found revision %s that did not match %s' % ( |
| 447 revision, assert_if_not)) |
| 448 return revision |
| 449 else: |
| 450 return None |
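[Reviewer note] Threading each return value back in as assert_if_not is what lets the loops in __init__() enforce a single revision across all dirs in a set. A hedged usage sketch, mirroring the __init__() code above (dir list hypothetical):

  revision = None
  for source_dir in setA_dirs:
    # Returns the checkout's HEAD hash for "repo:" dirs (None otherwise)
    # and raises if it disagrees with the hash found so far.
    revision = self._get_repo_revision(source_dir=source_dir,
                                       assert_if_not=revision)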