| OLD | NEW |
| 1 #!/usr/bin/python | 1 #!/usr/bin/python |
| 2 | 2 |
| 3 """ | 3 """ |
| 4 Copyright 2013 Google Inc. | 4 Copyright 2013 Google Inc. |
| 5 | 5 |
| 6 Use of this source code is governed by a BSD-style license that can be | 6 Use of this source code is governed by a BSD-style license that can be |
| 7 found in the LICENSE file. | 7 found in the LICENSE file. |
| 8 | 8 |
| 9 Calculate differences between image pairs, and store them in a database. | 9 Calculate differences between image pairs, and store them in a database. |
| 10 """ | 10 """ |
| 11 | 11 |
| 12 # System-level imports | 12 # System-level imports |
| 13 import contextlib | 13 import contextlib |
| 14 import errno |
| 14 import json | 15 import json |
| 15 import logging | 16 import logging |
| 16 import os | 17 import os |
| 18 import Queue |
| 17 import re | 19 import re |
| 18 import shutil | 20 import shutil |
| 19 import tempfile | 21 import tempfile |
| 22 import threading |
| 23 import time |
| 20 import urllib | 24 import urllib |
| 21 | 25 |
| 22 # Must fix up PYTHONPATH before importing from within Skia | 26 # Must fix up PYTHONPATH before importing from within Skia |
| 23 import fix_pythonpath # pylint: disable=W0611 | 27 import fix_pythonpath # pylint: disable=W0611 |
| 24 | 28 |
| 25 # Imports from within Skia | 29 # Imports from within Skia |
| 26 import find_run_binary | 30 import find_run_binary |
| 31 from py.utils import gs_utils |
| 32 |
| 27 | 33 |
| 28 SKPDIFF_BINARY = find_run_binary.find_path_to_program('skpdiff') | 34 SKPDIFF_BINARY = find_run_binary.find_path_to_program('skpdiff') |
| 29 | 35 |
| 30 DEFAULT_IMAGE_SUFFIX = '.png' | 36 DEFAULT_IMAGE_SUFFIX = '.png' |
| 31 DEFAULT_IMAGES_SUBDIR = 'images' | 37 DEFAULT_IMAGES_SUBDIR = 'images' |
| 38 # TODO(epoger): Figure out a better default number of threads; for now, |
| 39 # using a conservative default value. |
| 40 DEFAULT_NUM_WORKER_THREADS = 1 |
| 32 | 41 |
| 33 DISALLOWED_FILEPATH_CHAR_REGEX = re.compile('[^\w\-]') | 42 DISALLOWED_FILEPATH_CHAR_REGEX = re.compile('[^\w\-]') |
| 34 | 43 |
| 35 RGBDIFFS_SUBDIR = 'diffs' | 44 RGBDIFFS_SUBDIR = 'diffs' |
| 36 WHITEDIFFS_SUBDIR = 'whitediffs' | 45 WHITEDIFFS_SUBDIR = 'whitediffs' |
| 37 | 46 |
| 38 # Keys used within DiffRecord dictionary representations. | 47 # Keys used within DiffRecord dictionary representations. |
| 39 # NOTE: Keep these in sync with static/constants.js | 48 # NOTE: Keep these in sync with static/constants.js |
| 40 KEY__DIFFERENCES__MAX_DIFF_PER_CHANNEL = 'maxDiffPerChannel' | 49 KEY__DIFFERENCES__MAX_DIFF_PER_CHANNEL = 'maxDiffPerChannel' |
| 41 KEY__DIFFERENCES__NUM_DIFF_PIXELS = 'numDifferingPixels' | 50 KEY__DIFFERENCES__NUM_DIFF_PIXELS = 'numDifferingPixels' |
| 42 KEY__DIFFERENCES__PERCENT_DIFF_PIXELS = 'percentDifferingPixels' | 51 KEY__DIFFERENCES__PERCENT_DIFF_PIXELS = 'percentDifferingPixels' |
| 43 KEY__DIFFERENCES__PERCEPTUAL_DIFF = 'perceptualDifference' | 52 KEY__DIFFERENCES__PERCEPTUAL_DIFF = 'perceptualDifference' |
| 44 | 53 |
| 54 # Special values within ImageDiffDB._diff_dict |
| 55 _DIFFRECORD_FAILED = 'failed' |
| 56 _DIFFRECORD_PENDING = 'pending' |
| 57 |
| 58 # Temporary variable to keep track of how many times we download |
| 59 # the same file in multiple threads. |
| 60 # TODO(epoger): Delete this, once we see that the number stays close to 0. |
| 61 global_file_collisions = 0 |
| 62 |
| 45 | 63 |
| 46 class DiffRecord(object): | 64 class DiffRecord(object): |
| 47 """ Record of differences between two images. """ | 65 """ Record of differences between two images. """ |
| 48 | 66 |
| 49 def __init__(self, storage_root, | 67 def __init__(self, gs, storage_root, |
| 50 expected_image_url, expected_image_locator, | 68 expected_image_url, expected_image_locator, |
| 51 actual_image_url, actual_image_locator, | 69 actual_image_url, actual_image_locator, |
| 52 expected_images_subdir=DEFAULT_IMAGES_SUBDIR, | 70 expected_images_subdir=DEFAULT_IMAGES_SUBDIR, |
| 53 actual_images_subdir=DEFAULT_IMAGES_SUBDIR, | 71 actual_images_subdir=DEFAULT_IMAGES_SUBDIR, |
| 54 image_suffix=DEFAULT_IMAGE_SUFFIX): | 72 image_suffix=DEFAULT_IMAGE_SUFFIX): |
| 55 """Download this pair of images (unless we already have them on local disk), | 73 """Download this pair of images (unless we already have them on local disk), |
| 56 and prepare a DiffRecord for them. | 74 and prepare a DiffRecord for them. |
| 57 | 75 |
| 58 TODO(epoger): Make this asynchronously download images, rather than blocking | |
| 59 until the images have been downloaded and processed. | |
| 60 | |
| 61 Args: | 76 Args: |
| 77 gs: instance of GSUtils object we can use to download images |
| 62 storage_root: root directory on local disk within which we store all | 78 storage_root: root directory on local disk within which we store all |
| 63 images | 79 images |
| 64 expected_image_url: file or HTTP url from which we will download the | 80 expected_image_url: file, GS, or HTTP url from which we will download the |
| 65 expected image | 81 expected image |
| 66 expected_image_locator: a unique ID string under which we will store the | 82 expected_image_locator: a unique ID string under which we will store the |
| 67 expected image within storage_root (probably including a checksum to | 83 expected image within storage_root (probably including a checksum to |
| 68 guarantee uniqueness) | 84 guarantee uniqueness) |
| 69 actual_image_url: file or HTTP url from which we will download the | 85 actual_image_url: file, GS, or HTTP url from which we will download the |
| 70 actual image | 86 actual image |
| 71 actual_image_locator: a unique ID string under which we will store the | 87 actual_image_locator: a unique ID string under which we will store the |
| 72 actual image within storage_root (probably including a checksum to | 88 actual image within storage_root (probably including a checksum to |
| 73 guarantee uniqueness) | 89 guarantee uniqueness) |
| 74 expected_images_subdir: the subdirectory expected images are stored in. | 90 expected_images_subdir: the subdirectory expected images are stored in. |
| 75 actual_images_subdir: the subdirectory actual images are stored in. | 91 actual_images_subdir: the subdirectory actual images are stored in. |
| 76 image_suffix: the suffix of images. | 92 image_suffix: the suffix of images. |
| 77 """ | 93 """ |
| 78 expected_image_locator = _sanitize_locator(expected_image_locator) | 94 expected_image_locator = _sanitize_locator(expected_image_locator) |
| 79 actual_image_locator = _sanitize_locator(actual_image_locator) | 95 actual_image_locator = _sanitize_locator(actual_image_locator) |
| 80 | 96 |
| 81 # Download the expected/actual images, if we don't have them already. | 97 # Download the expected/actual images, if we don't have them already. |
| 82 # TODO(rmistry): Add a parameter that just tries to use already-present | |
| 83 # image files rather than downloading them. | |
| 84 expected_image_file = os.path.join( | 98 expected_image_file = os.path.join( |
| 85 storage_root, expected_images_subdir, | 99 storage_root, expected_images_subdir, |
| 86 str(expected_image_locator) + image_suffix) | 100 str(expected_image_locator) + image_suffix) |
| 87 actual_image_file = os.path.join( | 101 actual_image_file = os.path.join( |
| 88 storage_root, actual_images_subdir, | 102 storage_root, actual_images_subdir, |
| 89 str(actual_image_locator) + image_suffix) | 103 str(actual_image_locator) + image_suffix) |
| 90 try: | 104 try: |
| 91 _download_file(expected_image_file, expected_image_url) | 105 _download_file(gs, expected_image_file, expected_image_url) |
| 92 except Exception: | 106 except Exception: |
| 93 logging.exception('unable to download expected_image_url %s to file %s' % | 107 logging.exception('unable to download expected_image_url %s to file %s' % |
| 94 (expected_image_url, expected_image_file)) | 108 (expected_image_url, expected_image_file)) |
| 95 raise | 109 raise |
| 96 try: | 110 try: |
| 97 _download_file(actual_image_file, actual_image_url) | 111 _download_file(gs, actual_image_file, actual_image_url) |
| 98 except Exception: | 112 except Exception: |
| 99 logging.exception('unable to download actual_image_url %s to file %s' % | 113 logging.exception('unable to download actual_image_url %s to file %s' % |
| 100 (actual_image_url, actual_image_file)) | 114 (actual_image_url, actual_image_file)) |
| 101 raise | 115 raise |
| 102 | 116 |
| 103 # Get all diff images and values from skpdiff binary. | 117 # Get all diff images and values from skpdiff binary. |
| 104 skpdiff_output_dir = tempfile.mkdtemp() | 118 skpdiff_output_dir = tempfile.mkdtemp() |
| 105 try: | 119 try: |
| 106 skpdiff_summary_file = os.path.join(skpdiff_output_dir, | 120 skpdiff_summary_file = os.path.join(skpdiff_output_dir, |
| 107 'skpdiff-output.json') | 121 'skpdiff-output.json') |
| 108 skpdiff_rgbdiff_dir = os.path.join(skpdiff_output_dir, 'rgbDiff') | 122 skpdiff_rgbdiff_dir = os.path.join(skpdiff_output_dir, 'rgbDiff') |
| 109 skpdiff_whitediff_dir = os.path.join(skpdiff_output_dir, 'whiteDiff') | 123 skpdiff_whitediff_dir = os.path.join(skpdiff_output_dir, 'whiteDiff') |
| 110 expected_img = os.path.join(storage_root, expected_images_subdir, | 124 expected_img = os.path.join(storage_root, expected_images_subdir, |
| 111 str(expected_image_locator) + image_suffix) | 125 str(expected_image_locator) + image_suffix) |
| 112 actual_img = os.path.join(storage_root, actual_images_subdir, | 126 actual_img = os.path.join(storage_root, actual_images_subdir, |
| 113 str(actual_image_locator) + image_suffix) | 127 str(actual_image_locator) + image_suffix) |
| 114 | 128 |
| 115 # TODO: Call skpdiff ONCE for all image pairs, instead of calling it | 129 # TODO(epoger): Consider calling skpdiff ONCE for all image pairs, |
| 116 # repeatedly. This will allow us to parallelize a lot more work. | 130 # instead of calling it separately for each image pair. |
| 131 # Pro: we'll incur less overhead from making repeated system calls, |
| 132 # spinning up the skpdiff binary, etc. |
| 133 # Con: we would have to wait until all image pairs were loaded before |
| 134 # generating any of the diffs? |
| 117 find_run_binary.run_command( | 135 find_run_binary.run_command( |
| 118 [SKPDIFF_BINARY, '-p', expected_img, actual_img, | 136 [SKPDIFF_BINARY, '-p', expected_img, actual_img, |
| 119 '--jsonp', 'false', | 137 '--jsonp', 'false', |
| 120 '--output', skpdiff_summary_file, | 138 '--output', skpdiff_summary_file, |
| 121 '--differs', 'perceptual', 'different_pixels', | 139 '--differs', 'perceptual', 'different_pixels', |
| 122 '--rgbDiffDir', skpdiff_rgbdiff_dir, | 140 '--rgbDiffDir', skpdiff_rgbdiff_dir, |
| 123 '--whiteDiffDir', skpdiff_whitediff_dir, | 141 '--whiteDiffDir', skpdiff_whitediff_dir, |
| 124 ]) | 142 ]) |
| 125 | 143 |
| 126 # Get information out of the skpdiff_summary_file. | 144 # Get information out of the skpdiff_summary_file. |
| (...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 204 self.get_percent_pixels_differing(), | 222 self.get_percent_pixels_differing(), |
| 205 KEY__DIFFERENCES__MAX_DIFF_PER_CHANNEL: self._max_diff_per_channel, | 223 KEY__DIFFERENCES__MAX_DIFF_PER_CHANNEL: self._max_diff_per_channel, |
| 206 KEY__DIFFERENCES__PERCEPTUAL_DIFF: self._perceptual_difference, | 224 KEY__DIFFERENCES__PERCEPTUAL_DIFF: self._perceptual_difference, |
| 207 } | 225 } |
| 208 | 226 |
| 209 | 227 |
| 210 class ImageDiffDB(object): | 228 class ImageDiffDB(object): |
| 211 """ Calculates differences between image pairs, maintaining a database of | 229 """ Calculates differences between image pairs, maintaining a database of |
| 212 them for download.""" | 230 them for download.""" |
| 213 | 231 |
| 214 def __init__(self, storage_root): | 232 def __init__(self, storage_root, gs=None, |
| 233 num_worker_threads=DEFAULT_NUM_WORKER_THREADS): |
| 215 """ | 234 """ |
| 216 Args: | 235 Args: |
| 217 storage_root: string; root path within the DB will store all of its stuff | 236 storage_root: string; root path within the DB will store all of its stuff |
| 237 gs: instance of GSUtils object we can use to download images |
| 238 num_worker_threads: how many threads that download images and |
| 239 generate diffs simultaneously |
| 218 """ | 240 """ |
| 219 self._storage_root = storage_root | 241 self._storage_root = storage_root |
| 242 self._gs = gs |
| 220 | 243 |
| 221 # Dictionary of DiffRecords, keyed by (expected_image_locator, | 244 # Dictionary of DiffRecords, keyed by (expected_image_locator, |
| 222 # actual_image_locator) tuples. | 245 # actual_image_locator) tuples. |
| 246 # Values can also be _DIFFRECORD_PENDING, _DIFFRECORD_FAILED. |
| 247 # |
| 248 # Any thread that modifies _diff_dict must first acquire |
| 249 # _diff_dict_writelock! |
| 250 # |
| 251 # TODO(epoger): Disk is limitless, but RAM is not... so, we should probably |
| 252 # remove items from self._diff_dict if they haven't been accessed for a |
| 253 # long time. We can always regenerate them by diffing the images we |
| 254 # previously downloaded to local disk. |
| 255 # I guess we should figure out how expensive it is to download vs diff the |
| 256 # image pairs... if diffing them is expensive too, we can write these |
| 257 # _diff_dict objects out to disk if there's too many to hold in RAM. |
| 258 # Or we could use virtual memory to handle that automatically. |
| 223 self._diff_dict = {} | 259 self._diff_dict = {} |
| 260 self._diff_dict_writelock = threading.RLock() |
| 261 |
| 262 # Set up the queue for asynchronously loading DiffRecords, and start the |
| 263 # worker threads reading from it. |
| 264 self._tasks_queue = Queue.Queue(maxsize=2*num_worker_threads) |
| 265 self._workers = [] |
| 266 for i in range(num_worker_threads): |
| 267 worker = threading.Thread(target=self.worker, args=(i,)) |
| 268 worker.daemon = True |
| 269 worker.start() |
| 270 self._workers.append(worker) |
| 271 |
| 272 def worker(self, worker_num): |
| 273 """Launch a worker thread that pulls tasks off self._tasks_queue. |
| 274 |
| 275 Args: |
| 276 worker_num: (integer) which worker this is |
| 277 """ |
| 278 while True: |
| 279 params = self._tasks_queue.get() |
| 280 key, expected_image_url, actual_image_url = params |
| 281 try: |
| 282 diff_record = DiffRecord( |
| 283 self._gs, self._storage_root, |
| 284 expected_image_url=expected_image_url, |
| 285 expected_image_locator=key[0], |
| 286 actual_image_url=actual_image_url, |
| 287 actual_image_locator=key[1]) |
| 288 except Exception: |
| 289 logging.exception( |
| 290 'exception while creating DiffRecord for key %s' % str(key)) |
| 291 diff_record = _DIFFRECORD_FAILED |
| 292 self._diff_dict_writelock.acquire() |
| 293 try: |
| 294 self._diff_dict[key] = diff_record |
| 295 finally: |
| 296 self._diff_dict_writelock.release() |
| 224 | 297 |
| 225 @property | 298 @property |
| 226 def storage_root(self): | 299 def storage_root(self): |
| 227 return self._storage_root | 300 return self._storage_root |
| 228 | 301 |
| 229 def add_image_pair(self, | 302 def add_image_pair(self, |
| 230 expected_image_url, expected_image_locator, | 303 expected_image_url, expected_image_locator, |
| 231 actual_image_url, actual_image_locator): | 304 actual_image_url, actual_image_locator): |
| 232 """Download this pair of images (unless we already have them on local disk), | 305 """Asynchronously prepare a DiffRecord for a pair of images. |
| 233 and prepare a DiffRecord for them. | |
| 234 | 306 |
| 235 TODO(epoger): Make this asynchronously download images, rather than blocking | 307 This method will return quickly; calls to get_diff_record() will block |
| 236 until the images have been downloaded and processed. | 308 until the DiffRecord is available (or we have given up on creating it). |
| 237 When we do that, we should probably add a new method that will block | 309 |
| 238 until all of the images have been downloaded and processed. Otherwise, | 310 If we already have a DiffRecord for this particular image pair, no work |
| 239 we won't know when it's safe to start calling get_diff_record(). | 311 will be done. |
| 240 jcgregorio notes: maybe just make ImageDiffDB thread-safe and create a | |
| 241 thread-pool/worker queue at a higher level that just uses ImageDiffDB? | |
| 242 | 312 |
| 243 Args: | 313 Args: |
| 244 expected_image_url: file or HTTP url from which we will download the | 314 expected_image_url: file, GS, or HTTP url from which we will download the |
| 245 expected image | 315 expected image |
| 246 expected_image_locator: a unique ID string under which we will store the | 316 expected_image_locator: a unique ID string under which we will store the |
| 247 expected image within storage_root (probably including a checksum to | 317 expected image within storage_root (probably including a checksum to |
| 248 guarantee uniqueness) | 318 guarantee uniqueness) |
| 249 actual_image_url: file or HTTP url from which we will download the | 319 actual_image_url: file, GS, or HTTP url from which we will download the |
| 250 actual image | 320 actual image |
| 251 actual_image_locator: a unique ID string under which we will store the | 321 actual_image_locator: a unique ID string under which we will store the |
| 252 actual image within storage_root (probably including a checksum to | 322 actual image within storage_root (probably including a checksum to |
| 253 guarantee uniqueness) | 323 guarantee uniqueness) |
| 254 """ | 324 """ |
| 255 expected_image_locator = _sanitize_locator(expected_image_locator) | 325 expected_image_locator = _sanitize_locator(expected_image_locator) |
| 256 actual_image_locator = _sanitize_locator(actual_image_locator) | 326 actual_image_locator = _sanitize_locator(actual_image_locator) |
| 257 key = (expected_image_locator, actual_image_locator) | 327 key = (expected_image_locator, actual_image_locator) |
| 258 if not key in self._diff_dict: | 328 must_add_to_queue = False |
| 259 try: | 329 |
| 260 new_diff_record = DiffRecord( | 330 self._diff_dict_writelock.acquire() |
| 261 self._storage_root, | 331 try: |
| 262 expected_image_url=expected_image_url, | 332 if not key in self._diff_dict: |
| 263 expected_image_locator=expected_image_locator, | 333 # If we have already requested a diff between these two images, |
| 264 actual_image_url=actual_image_url, | 334 # we don't need to request it again. |
| 265 actual_image_locator=actual_image_locator) | 335 must_add_to_queue = True |
| 266 except Exception: | 336 self._diff_dict[key] = _DIFFRECORD_PENDING |
| 267 # If we can't create a real DiffRecord for this (expected, actual) pair, | 337 finally: |
| 268 # store None and the UI will show whatever information we DO have. | 338 self._diff_dict_writelock.release() |
| 269 # Fixes http://skbug.com/2368 . | 339 |
| 270 logging.exception( | 340 if must_add_to_queue: |
| 271 'got exception while creating a DiffRecord for ' | 341 self._tasks_queue.put((key, expected_image_url, actual_image_url)) |
| 272 'expected_image_url=%s , actual_image_url=%s; returning None' % ( | |
| 273 expected_image_url, actual_image_url)) | |
| 274 new_diff_record = None | |
| 275 self._diff_dict[key] = new_diff_record | |
| 276 | 342 |
| 277 def get_diff_record(self, expected_image_locator, actual_image_locator): | 343 def get_diff_record(self, expected_image_locator, actual_image_locator): |
| 278 """Returns the DiffRecord for this image pair. | 344 """Returns the DiffRecord for this image pair. |
| 279 | 345 |
| 280 Raises a KeyError if we don't have a DiffRecord for this image pair. | 346 This call will block until the diff record is available, or we were unable |
| 347 to generate it. |
| 348 |
| 349 Args: |
| 350 expected_image_locator: a unique ID string under which we will store the |
| 351 expected image within storage_root (probably including a checksum to |
| 352 guarantee uniqueness) |
| 353 actual_image_locator: a unique ID string under which we will store the |
| 354 actual image within storage_root (probably including a checksum to |
| 355 guarantee uniqueness) |
| 356 |
| 357 Returns the DiffRecord for this image pair, or None if we were unable to |
| 358 generate one. |
| 281 """ | 359 """ |
| 282 key = (_sanitize_locator(expected_image_locator), | 360 key = (_sanitize_locator(expected_image_locator), |
| 283 _sanitize_locator(actual_image_locator)) | 361 _sanitize_locator(actual_image_locator)) |
| 284 return self._diff_dict[key] | 362 diff_record = self._diff_dict[key] |
| 363 |
| 364 # If we have no results yet, block until we do. |
| 365 while diff_record == _DIFFRECORD_PENDING: |
| 366 time.sleep(1) |
| 367 diff_record = self._diff_dict[key] |
| 368 |
| 369 # Once we have the result... |
| 370 if diff_record == _DIFFRECORD_FAILED: |
| 371 logging.error( |
| 372 'failed to create a DiffRecord for expected_image_locator=%s , ' |
| 373 'actual_image_locator=%s' % ( |
| 374 expected_image_locator, actual_image_locator)) |
| 375 return None |
| 376 else: |
| 377 return diff_record |
| 285 | 378 |
| 286 | 379 |
| 287 # Utility functions | 380 # Utility functions |
| 288 | 381 |
| 289 def _download_file(local_filepath, url): | 382 def _download_file(gs, local_filepath, url): |
| 290 """Download a file from url to local_filepath, unless it is already there. | 383 """Download a file from url to local_filepath, unless it is already there. |
| 291 | 384 |
| 292 Args: | 385 Args: |
| 386 gs: instance of GSUtils object, in case the url points at Google Storage |
| 293 local_filepath: path on local disk where the image should be stored | 387 local_filepath: path on local disk where the image should be stored |
| 294 url: URL from which we can download the image if we don't have it yet | 388 url: HTTP or GS URL from which we can download the image if we don't have |
| 389 it yet |
| 295 """ | 390 """ |
| 391 global global_file_collisions |
| 296 if not os.path.exists(local_filepath): | 392 if not os.path.exists(local_filepath): |
| 297 _mkdir_unless_exists(os.path.dirname(local_filepath)) | 393 _mkdir_unless_exists(os.path.dirname(local_filepath)) |
| 298 with contextlib.closing(urllib.urlopen(url)) as url_handle: | 394 |
| 299 with open(local_filepath, 'wb') as file_handle: | 395 # First download the file contents into a unique filename, and |
| 300 shutil.copyfileobj(fsrc=url_handle, fdst=file_handle) | 396 # then rename that file. That way, if multiple threads are downloading |
| 397 # the same filename at the same time, they won't interfere with each |
| 398 # other (they will both download the file, and one will "win" in the end) |
| 399 temp_filename = '%s-%d' % (local_filepath, |
| 400 threading.current_thread().ident) |
| 401 if gs_utils.GSUtils.is_gs_url(url): |
| 402 (bucket, path) = gs_utils.GSUtils.split_gs_url(url) |
| 403 gs.download_file(source_bucket=bucket, source_path=path, |
| 404 dest_path=temp_filename) |
| 405 else: |
| 406 with contextlib.closing(urllib.urlopen(url)) as url_handle: |
| 407 with open(temp_filename, 'wb') as file_handle: |
| 408 shutil.copyfileobj(fsrc=url_handle, fdst=file_handle) |
| 409 |
| 410 # Rename the file to its real filename. |
| 411 # Keep count of how many colliding downloads we encounter; |
| 412 # if it's a large number, we may want to change our download strategy |
| 413 # to minimize repeated downloads. |
| 414 if os.path.exists(local_filepath): |
| 415 global_file_collisions += 1 |
| 416 else: |
| 417 os.rename(temp_filename, local_filepath) |
| 301 | 418 |
| 302 | 419 |
| 303 def _mkdir_unless_exists(path): | 420 def _mkdir_unless_exists(path): |
| 304 """Unless path refers to an already-existing directory, create it. | 421 """Unless path refers to an already-existing directory, create it. |
| 305 | 422 |
| 306 Args: | 423 Args: |
| 307 path: path on local disk | 424 path: path on local disk |
| 308 """ | 425 """ |
| 309 if not os.path.isdir(path): | 426 try: |
| 310 os.makedirs(path) | 427 os.makedirs(path) |
| 428 except OSError as e: |
| 429 if e.errno == errno.EEXIST: |
| 430 pass |
| 311 | 431 |
| 312 | 432 |
| 313 def _sanitize_locator(locator): | 433 def _sanitize_locator(locator): |
| 314 """Returns a sanitized version of a locator (one in which we know none of the | 434 """Returns a sanitized version of a locator (one in which we know none of the |
| 315 characters will have special meaning in filenames). | 435 characters will have special meaning in filenames). |
| 316 | 436 |
| 317 Args: | 437 Args: |
| 318 locator: string, or something that can be represented as a string | 438 locator: string, or something that can be represented as a string |
| 319 """ | 439 """ |
| 320 return DISALLOWED_FILEPATH_CHAR_REGEX.sub('_', str(locator)) | 440 return DISALLOWED_FILEPATH_CHAR_REGEX.sub('_', str(locator)) |
| 321 | 441 |
| 322 | 442 |
| 323 def _get_difference_locator(expected_image_locator, actual_image_locator): | 443 def _get_difference_locator(expected_image_locator, actual_image_locator): |
| 324 """Returns the locator string used to look up the diffs between expected_image | 444 """Returns the locator string used to look up the diffs between expected_image |
| 325 and actual_image. | 445 and actual_image. |
| 326 | 446 |
| 327 We must keep this function in sync with getImageDiffRelativeUrl() in | 447 We must keep this function in sync with getImageDiffRelativeUrl() in |
| 328 static/loader.js | 448 static/loader.js |
| 329 | 449 |
| 330 Args: | 450 Args: |
| 331 expected_image_locator: locator string pointing at expected image | 451 expected_image_locator: locator string pointing at expected image |
| 332 actual_image_locator: locator string pointing at actual image | 452 actual_image_locator: locator string pointing at actual image |
| 333 | 453 |
| 334 Returns: already-sanitized locator where the diffs between expected and | 454 Returns: already-sanitized locator where the diffs between expected and |
| 335 actual images can be found | 455 actual images can be found |
| 336 """ | 456 """ |
| 337 return "%s-vs-%s" % (_sanitize_locator(expected_image_locator), | 457 return "%s-vs-%s" % (_sanitize_locator(expected_image_locator), |
| 338 _sanitize_locator(actual_image_locator)) | 458 _sanitize_locator(actual_image_locator)) |
| OLD | NEW |