| OLD | NEW |
| 1 # Copyright (c) 2006-2009 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2006-2009 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 # TODO(gwilson): 1. Change text differs to use external utils. | 5 # TODO(gwilson): 1. Change text differs to use external utils. |
| 6 # 2. Change text_expectations parsing to existing | 6 # 2. Change text_expectations parsing to existing |
| 7 #                     logic in layout_package.test_expectations. | 7 #                     logic in layout_package.test_expectations. |
| 8 import google.path_utils | 8 import google.path_utils |
| 9 import difflib | 9 import difflib |
| 10 import errno | 10 import errno |
| (...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 174 def __init__(self, | 174 def __init__(self, |
| 175 build, | 175 build, |
| 176 builder_name, | 176 builder_name, |
| 177 exclude_known_failures, | 177 exclude_known_failures, |
| 178 test_regex, | 178 test_regex, |
| 179 output_dir, | 179 output_dir, |
| 180 max_failures, | 180 max_failures, |
| 181 verbose): | 181 verbose): |
| 182 self.build = build | 182 self.build = build |
| 183 # TODO(gwilson): add full url-encoding for the platform. | 183 # TODO(gwilson): add full url-encoding for the platform. |
| 184 self.platform = builder_name.replace(" ", "%20") | 184 self.SetPlatform(builder_name) |
| 185 self.exclude_known_failures = exclude_known_failures | 185 self.exclude_known_failures = exclude_known_failures |
| 186 self.test_regex = test_regex | 186 self.test_regex = test_regex |
| 187 self.output_dir = output_dir | 187 self.output_dir = output_dir |
| 188 self.max_failures = max_failures | 188 self.max_failures = max_failures |
| 189 self.verbose = verbose | 189 self.verbose = verbose |
| 190 self.fyi_builder = False | 190 self.fyi_builder = False |
| 191 self._flaky_test_cache = {} | 191 self._flaky_test_cache = {} |
| 192 self._test_expectations_cache = None | 192 self._test_expectations_cache = None |
| 193 # If true, scraping will still happen but no files will be downloaded. |
| 194 self.dont_download = False |
| 195 |
| 196 def SetPlatform(self, platform): |
| 197 self.platform = platform.replace(" ", "%20") |
| 193 | 198 |
| 194 # TODO(gwilson): Change this to get the last build that finished | 199 # TODO(gwilson): Change this to get the last build that finished |
| 195 # successfully. | 200 # successfully. |
| 196 def GetLastBuild(self): | 201 def GetLastBuild(self): |
| 197 """ | 202 """ |
| 198 Returns the last build number for this platform. | 203 Returns the last build number for this platform. |
| 199 If use_fyi is true, this only looks at the fyi builder. | 204 If use_fyi is true, this only looks at the fyi builder. |
| 200 """ | 205 """ |
| 201 try: | 206 try: |
| 202 return ExtractSingleRegexAtURL(GetBuilderURL(self.platform, | 207 return ExtractSingleRegexAtURL(GetBuilderURL(self.platform, |
| 203 self.fyi_builder), | 208 self.fyi_builder), |
| 204 LAST_BUILD_REGEX) | 209 LAST_BUILD_REGEX) |
| 205 except urllib2.HTTPError: | 210 except urllib2.HTTPError: |
| 206 if not self.fyi_builder: | 211 if not self.fyi_builder: |
| 207 self.fyi_builder = True | 212 self.fyi_builder = True |
| 208 return self.GetLastBuild() | 213 return self.GetLastBuild() |
| 209 | 214 |
| 210 def GetFailures(self): | 215 def GetFailures(self): |
| 211 if not self.build: | 216 if not self.build: |
| 212 self.build = self.GetLastBuild() | 217 self.build = self.GetLastBuild() |
| 213 if self.verbose: | 218 if self.verbose: |
| 214 print "Using build number %s" % self.build | 219 print "Using build number %s" % self.build |
| 215 | 220 |
| 216 self.failures = self._GetFailuresFromBuilder() | 221 self.failures = self._GetFailuresFromBuilder() |
| 217 if self.failures and self._DownloadResultResources(): | 222 if (self.failures and |
| 223 (self._DownloadResultResources() or self.dont_download)): |
| 218 return self.failures | 224 return self.failures |
| 219 return None | 225 return None |
| 220 | 226 |
| 221 def _GetFailuresFromBuilder(self): | 227 def _GetFailuresFromBuilder(self): |
| 222 """ | 228 """ |
| 223 Returns a list of failures for the given build and platform by scraping | 229 Returns a list of failures for the given build and platform by scraping |
| 224 the buildbots and parsing their results. | 230 the buildbots and parsing their results. |
| 225 The list returned contains Failure class objects. | 231 The list returned contains Failure class objects. |
| 226 """ | 232 """ |
| 227 if self.verbose: | 233 if self.verbose: |
| (...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 306 matches.append(failure) | 312 matches.append(failure) |
| 307 return matches | 313 return matches |
| 308 | 314 |
| 309 # TODO(gwilson): add support for multiple conflicting build numbers by | 315 # TODO(gwilson): add support for multiple conflicting build numbers by |
| 310 # renaming the zip file and naming the directory appropriately. | 316 # renaming the zip file and naming the directory appropriately. |
| 311 def _DownloadResultResources(self): | 317 def _DownloadResultResources(self): |
| 312 """ | 318 """ |
| 313 Finds and downloads/extracts all of the test results (pixel/text output) | 319 Finds and downloads/extracts all of the test results (pixel/text output) |
| 314 for all of the given failures. | 320 for all of the given failures. |
| 315 """ | 321 """ |
| 316 content = ScrapeURL(GetArchiveURL(self.build, | 322 revision, build_name = self._GetRevisionAndBuildFromArchiveStep() |
| 317 self.platform, | |
| 318 self.fyi_builder)) | |
| 319 revision = ExtractFirstValue(content, ARCHIVE_URL_REGEX) | |
| 320 build_name = ExtractFirstValue(content, BUILD_NAME_REGEX) | |
| 321 | 323 |
| 322 target_zip = "%s/layout-test-results-%s.zip" % (self.output_dir, | 324 target_zip = "%s/layout-test-results-%s.zip" % (self.output_dir, |
| 323 self.build) | 325 self.build) |
| 324 zip_url = GetZipFileURL(revision, build_name) | 326 zip_url = GetZipFileURL(revision, build_name) |
| 325 if self.verbose: | 327 if self.verbose: |
| 326 print "Downloading zip file from %s to %s" % (zip_url, target_zip) | 328 print "Downloading zip file from %s to %s" % (zip_url, target_zip) |
| 327 filename = self._DownloadFile(zip_url, target_zip, "b") | 329 filename = self._DownloadFile(zip_url, target_zip, "b") |
| 328 if not filename: | 330 if not filename: |
| 329 print "Could not download zip file from %s. Does it exist?" % zip_url | 331 if self.verbose: |
| 332 print "Could not download zip file from %s. Does it exist?" % zip_url |
| 330 return False | 333 return False |
| 331 | 334 |
| 332 if zipfile.is_zipfile(filename): | 335 if zipfile.is_zipfile(filename): |
| 333 zip = zipfile.ZipFile(filename) | 336 zip = zipfile.ZipFile(filename) |
| 334 if self.verbose: | 337 if self.verbose: |
| 335 print 'Extracting files...' | 338 print 'Extracting files...' |
| 336 directory = "%s/layout-test-results-%s" % (self.output_dir, self.build) | 339 directory = "%s/layout-test-results-%s" % (self.output_dir, self.build) |
| 337 CreateDirectory(directory) | 340 CreateDirectory(directory) |
| 338 for failure in self.failures: | 341 for failure in self.failures: |
| 339 if failure.text_diff_mismatch or failure.simplified_text_diff_mismatch: | 342 if failure.text_diff_mismatch or failure.simplified_text_diff_mismatch: |
| 340 self._PopulateTextFailure(failure, directory, zip) | 343 self._PopulateTextFailure(failure, directory, zip) |
| 341 if failure.image_mismatch: | 344 if failure.image_mismatch: |
| 342 self._PopulateImageFailure(failure, directory, zip) | 345 self._PopulateImageFailure(failure, directory, zip) |
| 343 failure.test_age = self._GetFileAge(failure.GetTestHome()) | 346 failure.test_age = self._GetFileAge(failure.GetTestHome()) |
| 344 failure.flakiness = self._GetFlakiness(failure.test_path, self.platform) | 347 failure.flakiness = self._GetFlakiness(failure.test_path, self.platform) |
| 345 failure.test_expectations_line = ( | 348 failure.test_expectations_line = ( |
| 346 self._GetTestExpectationsLine(failure.test_path, self.platform)) | 349 self._GetTestExpectationsLine(failure.test_path, self.platform)) |
| 347 zip.close() | 350 zip.close() |
| 348 if self.verbose: | 351 if self.verbose: |
| 349 print "Files extracted." | 352 print "Files extracted." |
| 350 print "Deleting zip file..." | 353 print "Deleting zip file..." |
| 351 os.remove(filename) | 354 os.remove(filename) |
| 352 return True | 355 return True |
| 353 else: | 356 else: |
| 354 print "Downloaded file '%s' doesn't look like a zip file." % filename | 357 if self.verbose: |
| 358 print "Downloaded file '%s' doesn't look like a zip file." % filename |
| 355 return False | 359 return False |
| 356 | 360 |
| 361 def _GetRevisionAndBuildFromArchiveStep(self): |
| 362 content = ScrapeURL(GetArchiveURL(self.build, |
| 363 self.platform, |
| 364 self.fyi_builder)) |
| 365 revision = ExtractFirstValue(content, ARCHIVE_URL_REGEX) |
| 366 build_name = ExtractFirstValue(content, BUILD_NAME_REGEX) |
| 367 return (revision, build_name) |
| 368 |
| 357 def _PopulateTextFailure(self, failure, directory, zip): | 369 def _PopulateTextFailure(self, failure, directory, zip): |
| 358 baselines = self._GetBaseline(failure.GetExpectedTextFilename(), | 370 baselines = self._GetBaseline(failure.GetExpectedTextFilename(), |
| 359 directory) | 371 directory) |
| 360 failure.text_baseline_local = baselines[0] | 372 failure.text_baseline_local = baselines[0] |
| 361 failure.text_baseline_url = baselines[1] | 373 failure.text_baseline_url = baselines[1] |
| 362 failure.text_baseline_age = ( | 374 failure.text_baseline_age = ( |
| 363 self._GetFileAge(failure.GetTextBaselineTracHome())) | 375 self._GetFileAge(failure.GetTextBaselineTracHome())) |
| 364 failure.text_actual_local = "%s/%s" % (directory, | 376 failure.text_actual_local = "%s/%s" % (directory, |
| 365 failure.GetActualTextFilename()) | 377 failure.GetActualTextFilename()) |
| 366 if self._ExtractFileFromZip(zip, | 378 if (not self.dont_download and |
| 367 failure.GetTextResultLocationInZipFile(), | 379 self._ExtractFileFromZip(zip, |
| 368 failure.text_actual_local): | 380 failure.GetTextResultLocationInZipFile(), |
| 381 failure.text_actual_local)): |
| 369 GenerateTextDiff(failure.text_baseline_local, | 382 GenerateTextDiff(failure.text_baseline_local, |
| 370 failure.text_actual_local, | 383 failure.text_actual_local, |
| 371 directory + "/" + failure.GetTextDiffFilename()) | 384 directory + "/" + failure.GetTextDiffFilename()) |
| 372 | 385 |
| 373 def _PopulateImageFailure(self, failure, directory, zip): | 386 def _PopulateImageFailure(self, failure, directory, zip): |
| 374 baselines = self._GetBaseline(failure.GetExpectedImageFilename(), | 387 baselines = self._GetBaseline(failure.GetExpectedImageFilename(), |
| 375 directory) | 388 directory) |
| 376 failure.image_baseline_local = baselines[0] | 389 failure.image_baseline_local = baselines[0] |
| 377 failure.image_baseline_url = baselines[1] | 390 failure.image_baseline_url = baselines[1] |
| 378 if baselines[0] and baselines[1]: | 391 if baselines[0] and baselines[1]: |
| 379 failure.image_baseline_age = ( | 392 failure.image_baseline_age = ( |
| 380 self._GetFileAge(failure.GetImageBaselineTracHome())) | 393 self._GetFileAge(failure.GetImageBaselineTracHome())) |
| 381 failure.image_actual_local = "%s/%s" % (directory, | 394 failure.image_actual_local = "%s/%s" % (directory, |
| 382 failure.GetActualImageFilename()) | 395 failure.GetActualImageFilename()) |
| 383 self._ExtractFileFromZip(zip, | 396 self._ExtractFileFromZip(zip, |
| 384 failure.GetImageResultLocationInZipFile(), | 397 failure.GetImageResultLocationInZipFile(), |
| 385 failure.image_actual_local) | 398 failure.image_actual_local) |
| 386 if not GeneratePNGDiff("./" + failure.image_baseline_local, | 399 if (not GeneratePNGDiff("./" + failure.image_baseline_local, |
| 387 "./" + failure.image_actual_local, | 400 "./" + failure.image_actual_local, |
| 388 "./%s/%s" % | 401 "./%s/%s" % |
| 389 (directory, failure.GetImageDiffFilename())): | 402 (directory, failure.GetImageDiffFilename())) |
| 403 and self.verbose): |
| 390 print "Could not generate PNG diff for %s" % failure.test_path | 404 print "Could not generate PNG diff for %s" % failure.test_path |
| 391 if failure.IsImageBaselineInChromium(): | 405 if failure.IsImageBaselineInChromium(): |
| 392 upstream_baselines = ( | 406 upstream_baselines = ( |
| 393 self._GetUpstreamBaseline(failure.GetExpectedImageFilename(), | 407 self._GetUpstreamBaseline(failure.GetExpectedImageFilename(), |
| 394 directory)) | 408 directory)) |
| 395 failure.image_baseline_upstream_local = upstream_baselines[0] | 409 failure.image_baseline_upstream_local = upstream_baselines[0] |
| 396 failure.image_baseline_upstream_url = upstream_baselines[1] | 410 failure.image_baseline_upstream_url = upstream_baselines[1] |
| 397 | 411 |
| 398 def _GetBaseline(self, filename, directory, upstream_only = False): | 412 def _GetBaseline(self, filename, directory, upstream_only = False): |
| 399 """ | 413 """ |
| (...skipping 12 matching lines...) Expand all Loading... |
| 412 if upstream_only: | 426 if upstream_only: |
| 413 last_index = local_filename.rfind(".") | 427 last_index = local_filename.rfind(".") |
| 414 if last_index > -1: | 428 if last_index > -1: |
| 415 local_filename = (local_filename[0:last_index] + | 429 local_filename = (local_filename[0:last_index] + |
| 416 UPSTREAM_IMAGE_FILE_ENDING) | 430 UPSTREAM_IMAGE_FILE_ENDING) |
| 417 | 431 |
| 418 download_file_modifiers = "" | 432 download_file_modifiers = "" |
| 419 if local_filename.endswith(".png"): | 433 if local_filename.endswith(".png"): |
| 420 download_file_modifiers = "b" # binary file | 434 download_file_modifiers = "b" # binary file |
| 421 | 435 |
| 422 CreateDirectory(local_filename[0:local_filename.rfind("/")]) | 436 if not self.dont_download: |
| 437 CreateDirectory(local_filename[0:local_filename.rfind("/")]) |
| 423 | 438 |
| 424 webkit_mac_location = ( | 439 webkit_mac_location = ( |
| 425 self._MangleWebkitPixelTestLocation(WEBKIT_IMAGE_BASELINE_BASE_URL_MAC, | 440 self._MangleWebkitPixelTestLocation(WEBKIT_IMAGE_BASELINE_BASE_URL_MAC, |
| 426 filename)) | 441 filename)) |
| 427 webkit_win_location = ( | 442 webkit_win_location = ( |
| 428 self._MangleWebkitPixelTestLocation(WEBKIT_IMAGE_BASELINE_BASE_URL_WIN, | 443 self._MangleWebkitPixelTestLocation(WEBKIT_IMAGE_BASELINE_BASE_URL_WIN, |
| 429 filename)) | 444 filename)) |
| 430 | 445 |
| 431 possible_files = [] | 446 possible_files = [] |
| 432 | 447 |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 486 def _GetFileAge(self, url): | 501 def _GetFileAge(self, url): |
| 487 try: | 502 try: |
| 488 if url.find(WEBKIT_TRAC_HOSTNAME) > -1: | 503 if url.find(WEBKIT_TRAC_HOSTNAME) > -1: |
| 489 return ExtractSingleRegexAtURL(url[:url.rfind("/")], | 504 return ExtractSingleRegexAtURL(url[:url.rfind("/")], |
| 490 WEBKIT_FILE_AGE_REGEX % | 505 WEBKIT_FILE_AGE_REGEX % |
| 491 url[url.find("/browser"):]) | 506 url[url.find("/browser"):]) |
| 492 else: | 507 else: |
| 493 return ExtractSingleRegexAtURL(url + "?view=log", | 508 return ExtractSingleRegexAtURL(url + "?view=log", |
| 494 CHROMIUM_FILE_AGE_REGEX) | 509 CHROMIUM_FILE_AGE_REGEX) |
| 495 except: | 510 except: |
| 496 print "Could not find age for %s. Does the file exist?" % url | 511 if self.verbose: |
| 512 print "Could not find age for %s. Does the file exist?" % url |
| 497 return None | 513 return None |
| 498 | 514 |
| 499 # Returns a flakiness on a scale of 1-50. | 515 # Returns a flakiness on a scale of 1-50. |
| 500 # TODO(gwilson): modify this to also return which of the last 10 builds failed | 516 # TODO(gwilson): modify this to also return which of the last 10 builds failed |
| 501 # for this test. | 517 # for this test. |
| 502 def _GetFlakiness(self, test_path, target_platform): | 518 def _GetFlakiness(self, test_path, target_platform): |
| 503 url = GetFlakyTestURL(target_platform) | 519 url = GetFlakyTestURL(target_platform) |
| 504 if url == "": | 520 if url == "": |
| 505 return None | 521 return None |
| 506 | 522 |
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 566 def _DownloadFile(self, url, local_filename = None, modifiers = "", | 582 def _DownloadFile(self, url, local_filename = None, modifiers = "", |
| 567 force = False): | 583 force = False): |
| 568 """ | 584 """ |
| 569 Copy the contents of a file from a given URL | 585 Copy the contents of a file from a given URL |
| 570 to a local file. | 586 to a local file. |
| 571 """ | 587 """ |
| 572 try: | 588 try: |
| 573 if local_filename == None: | 589 if local_filename == None: |
| 574 local_filename = url.split('/')[-1] | 590 local_filename = url.split('/')[-1] |
| 575 if os.path.isfile(local_filename) and not force: | 591 if os.path.isfile(local_filename) and not force: |
| 576 print "File at %s already exists." % local_filename | 592 if self.verbose: |
| 593 print "File at %s already exists." % local_filename |
| 594 return local_filename |
| 595 if self.dont_download: |
| 577 return local_filename | 596 return local_filename |
| 578 webFile = urllib2.urlopen(url) | 597 webFile = urllib2.urlopen(url) |
| 579 localFile = open(local_filename, ("w%s" % modifiers)) | 598 localFile = open(local_filename, ("w%s" % modifiers)) |
| 580 localFile.write(webFile.read()) | 599 localFile.write(webFile.read()) |
| 581 webFile.close() | 600 webFile.close() |
| 582 localFile.close() | 601 localFile.close() |
| 583 except urllib2.HTTPError: | 602 except urllib2.HTTPError: |
| 584 return None | 603 return None |
| 585 except urllib2.URLError: | 604 except urllib2.URLError: |
| 586 print "The url %s is malformed." % url | 605 print "The url %s is malformed." % url |
| 587 return None | 606 return None |
| 588 return localFile.name | 607 return localFile.name |
| OLD | NEW |