OLD | NEW |
1 # Copyright (C) 2012 Google Inc. All rights reserved. | 1 # Copyright (C) 2012 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 11 matching lines...) Expand all Loading... |
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
28 | 28 |
29 """Unit tests for run_perf_tests.""" | 29 """Unit tests for run_perf_tests.""" |
30 | 30 |
31 import StringIO | 31 import StringIO |
| 32 import datetime |
32 import json | 33 import json |
33 import re | 34 import re |
34 import unittest2 as unittest | 35 import unittest2 as unittest |
35 | 36 |
36 from webkitpy.common.host_mock import MockHost | 37 from webkitpy.common.host_mock import MockHost |
37 from webkitpy.common.system.outputcapture import OutputCapture | 38 from webkitpy.common.system.outputcapture import OutputCapture |
| 39 from webkitpy.layout_tests.port.driver import DriverOutput |
38 from webkitpy.layout_tests.port.test import TestPort | 40 from webkitpy.layout_tests.port.test import TestPort |
| 41 from webkitpy.performance_tests.perftest import ChromiumStylePerfTest |
39 from webkitpy.performance_tests.perftest import DEFAULT_TEST_RUNNER_COUNT | 42 from webkitpy.performance_tests.perftest import DEFAULT_TEST_RUNNER_COUNT |
| 43 from webkitpy.performance_tests.perftest import PerfTest |
40 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner | 44 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner |
41 | 45 |
42 | 46 |
43 class MainTest(unittest.TestCase): | 47 class MainTest(unittest.TestCase): |
44 def create_runner(self, args=[]): | 48 def create_runner(self, args=[]): |
45 options, parsed_args = PerfTestsRunner._parse_args(args) | 49 options, parsed_args = PerfTestsRunner._parse_args(args) |
46 test_port = TestPort(host=MockHost(), options=options) | 50 test_port = TestPort(host=MockHost(), options=options) |
47 runner = PerfTestsRunner(args=args, port=test_port) | 51 runner = PerfTestsRunner(args=args, port=test_port) |
48 runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspect
or') | 52 runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspect
or') |
49 runner._host.filesystem.maybe_make_directory(runner._base_path, 'Binding
s') | 53 runner._host.filesystem.maybe_make_directory(runner._base_path, 'Binding
s') |
(...skipping 159 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
209 self.assertEqual(MockFileUploader.called, ['FileUploader', 'upload_singl
e_text_file']) | 213 self.assertEqual(MockFileUploader.called, ['FileUploader', 'upload_singl
e_text_file']) |
210 | 214 |
211 MockFileUploader.reset() | 215 MockFileUploader.reset() |
212 MockFileUploader.upload_single_text_file_return_value = StringIO.StringI
O('{"status": "SomethingHasFailed", "failureStored": false}') | 216 MockFileUploader.upload_single_text_file_return_value = StringIO.StringI
O('{"status": "SomethingHasFailed", "failureStored": false}') |
213 output = OutputCapture() | 217 output = OutputCapture() |
214 output.capture_output() | 218 output.capture_output() |
215 self.assertFalse(runner._upload_json('some.host', 'some.json', '/some/pa
th', MockFileUploader)) | 219 self.assertFalse(runner._upload_json('some.host', 'some.json', '/some/pa
th', MockFileUploader)) |
216 _, _, logs = output.restore_output() | 220 _, _, logs = output.restore_output() |
217 serialized_json = json.dumps({'status': 'SomethingHasFailed', 'failureSt
ored': False}, indent=4) | 221 serialized_json = json.dumps({'status': 'SomethingHasFailed', 'failureSt
ored': False}, indent=4) |
218 self.assertEqual(logs, 'Uploaded JSON to https://some.host/some/path but
got an error:\n%s\n' % serialized_json) | 222 self.assertEqual(logs, 'Uploaded JSON to https://some.host/some/path but
got an error:\n%s\n' % serialized_json) |
| 223 |
| 224 |
class InspectorPassTestData:
    # Canned driver output for inspector/pass.html (returned by TestDriver
    # below) and the runner log the tests expect it to produce.
    text = 'RESULT group_name: test_name= 42 ms'
    output = """Running inspector/pass.html (2 of 2)
RESULT group_name: test_name= 42 ms
Finished: 0.1 s

"""
| 232 |
| 233 |
class EventTargetWrapperTestData:
    # Canned driver output for Bindings/event-target-wrapper.html: raw sample
    # lines plus a Time summary block, the runner log expected for it, and the
    # metrics entry expected in the output JSON (one sample list per runner,
    # repeated 4 times — presumably DEFAULT_TEST_RUNNER_COUNT; TODO confirm).
    text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

Time:
values 1486, 1471, 1510, 1505, 1478, 1490 ms
avg 1490 ms
median 1488 ms
stdev 15.13935 ms
min 1471 ms
max 1510 ms
"""

    output = """Running Bindings/event-target-wrapper.html (1 of 2)
RESULT Bindings: event-target-wrapper: Time= 1490.0 ms
median= 1488.0 ms, stdev= 14.11751 ms, min= 1471.0 ms, max= 1510.0 ms
Finished: 0.1 s

"""

    results = {'url': 'http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings/event-target-wrapper.html',
        'metrics': {'Time': {'current': [[1486.0, 1471.0, 1510.0, 1505.0, 1478.0, 1490.0]] * 4}}}
| 276 |
| 277 |
class SomeParserTestData:
    # Canned driver output for Parser/some-parser.html and the expected
    # runner log line for it.
    text = """Running 20 times
Ignoring warm-up run (1115)

Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50861 ms
min 1080 ms
max 1120 ms
"""

    output = """Running Parser/some-parser.html (2 of 2)
RESULT Parser: some-parser: Time= 1100.0 ms
median= 1101.0 ms, stdev= 13.31402 ms, min= 1080.0 ms, max= 1120.0 ms
Finished: 0.1 s

"""
| 297 |
| 298 |
class MemoryTestData:
    # Canned driver output for Parser/memory-test.html covering three metrics
    # (Time, JS Heap, Malloc), the expected runner log, and the expected
    # per-metric entries in the output JSON.
    text = """Running 20 times
Ignoring warm-up run (1115)

Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50861 ms
min 1080 ms
max 1120 ms

JS Heap:
values 825000, 811000, 848000, 837000, 829000 bytes
avg 830000 bytes
median 829000 bytes
stdev 13784.04875 bytes
min 811000 bytes
max 848000 bytes

Malloc:
values 529000, 511000, 548000, 536000, 521000 bytes
avg 529000 bytes
median 529000 bytes
stdev 14124.44689 bytes
min 511000 bytes
max 548000 bytes
"""

    output = """Running 1 tests
Running Parser/memory-test.html (1 of 1)
RESULT Parser: memory-test: Time= 1100.0 ms
median= 1101.0 ms, stdev= 13.31402 ms, min= 1080.0 ms, max= 1120.0 ms
RESULT Parser: memory-test: JSHeap= 830000.0 bytes
median= 829000.0 bytes, stdev= 12649.11064 bytes, min= 811000.0 bytes, max= 848000.0 bytes
RESULT Parser: memory-test: Malloc= 529000.0 bytes
median= 529000.0 bytes, stdev= 12961.48139 bytes, min= 511000.0 bytes, max= 548000.0 bytes
Finished: 0.1 s
"""

    results = {'current': [[1080, 1120, 1095, 1101, 1104]] * 4}
    js_heap_results = {'current': [[825000, 811000, 848000, 837000, 829000]] * 4}
    malloc_results = {'current': [[529000, 511000, 548000, 536000, 521000]] * 4}
| 342 |
| 343 |
class TestDriver:
    """Fake layout-test driver: maps each known test file name to a canned
    DriverOutput so the runner tests never launch a real browser process."""

    def run_test(self, driver_input, stop_when_done):
        # Default: empty output, no timeout, no crash (i.e. a "silent" test).
        text = ''
        timeout = False
        crash = False
        if driver_input.test_name.endswith('pass.html'):
            text = InspectorPassTestData.text
        elif driver_input.test_name.endswith('timeout.html'):
            timeout = True
        elif driver_input.test_name.endswith('failed.html'):
            # None text simulates the driver producing no output at all.
            text = None
        elif driver_input.test_name.endswith('tonguey.html'):
            text = 'we are not expecting an output from perf tests but RESULT blablabla'
        elif driver_input.test_name.endswith('crash.html'):
            crash = True
        elif driver_input.test_name.endswith('event-target-wrapper.html'):
            text = EventTargetWrapperTestData.text
        elif driver_input.test_name.endswith('some-parser.html'):
            text = SomeParserTestData.text
        elif driver_input.test_name.endswith('memory-test.html'):
            text = MemoryTestData.text
        return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

    def start(self):
        """do nothing"""

    def stop(self):
        """do nothing"""
| 372 |
| 373 |
| 374 class IntegrationTest(unittest.TestCase): |
| 375 def _normalize_output(self, log): |
| 376 return re.sub(r'(stdev=\s+\d+\.\d{5})\d+', r'\1', re.sub(r'Finished: [0-
9\.]+ s', 'Finished: 0.1 s', log)) |
| 377 |
| 378 def _load_output_json(self, runner): |
| 379 json_content = runner._host.filesystem.read_text_file(runner._output_jso
n_path()) |
| 380 return json.loads(re.sub(r'("stdev":\s*\d+\.\d{5})\d+', r'\1', json_cont
ent)) |
| 381 |
| 382 def create_runner(self, args=[], driver_class=TestDriver): |
| 383 options, parsed_args = PerfTestsRunner._parse_args(args) |
| 384 test_port = TestPort(host=MockHost(), options=options) |
| 385 test_port.create_driver = lambda worker_number=None, no_timeout=False: d
river_class() |
| 386 |
| 387 runner = PerfTestsRunner(args=args, port=test_port) |
| 388 runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspect
or') |
| 389 runner._host.filesystem.maybe_make_directory(runner._base_path, 'Binding
s') |
| 390 runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser'
) |
| 391 |
| 392 return runner, test_port |
| 393 |
    def run_test(self, test_name):
        """Run one Chromium-style test through the runner; True iff no
        unexpected results (TestDriver fakes the outcome by file name)."""
        runner, port = self.create_runner()
        tests = [ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name))]
        return runner._run_tests_set(tests) == 0

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))
| 416 |
| 417 def _tests_for_runner(self, runner, test_names): |
| 418 filesystem = runner._host.filesystem |
| 419 tests = [] |
| 420 for test in test_names: |
| 421 path = filesystem.join(runner._base_path, test) |
| 422 dirname = filesystem.dirname(path) |
| 423 if test.startswith('inspector/'): |
| 424 tests.append(ChromiumStylePerfTest(runner._port, test, path)) |
| 425 else: |
| 426 tests.append(PerfTest(runner._port, test, path)) |
| 427 return tests |
| 428 |
    def test_run_test_set(self):
        # Only inspector/pass.html succeeds; the other five names trigger the
        # distinct failure modes faked by TestDriver.run_test.
        runner, port = self.create_runner()
        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertTrue('\nRESULT group_name: test_name= 42 ms\n' in log)

    def test_run_test_set_kills_drt_per_run(self):

        class TestDriverWithStopCount(TestDriver):
            # Class-level counter shared across all driver instances.
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        runner, port = self.create_runner(driver_class=TestDriverWithStopCount)

        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        unexpected_result_count = runner._run_tests_set(tests)

        # One stop per test, including failing/crashing/timing-out ones.
        self.assertEqual(TestDriverWithStopCount.stop_count, 6)

    def test_run_test_set_for_parser_tests(self):
        runner, port = self.create_runner()
        tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(self._normalize_output(log), EventTargetWrapperTestData.output + SomeParserTestData.output)

    def test_run_memory_test(self):
        runner, port = self.create_runner_and_setup_results_template()
        runner._timestamp = 123456789
        port.host.filesystem.write_text_file(runner._base_path + '/Parser/memory-test.html', 'some content')

        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner.run()
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(self._normalize_output(log), MemoryTestData.output + '\nMOCK: user.open_url: file://...\n')
        # All three metrics from the canned output must land in the JSON.
        parser_tests = self._load_output_json(runner)[0]['tests']['Parser']['tests']
        self.assertEqual(parser_tests['memory-test']['metrics']['Time'], MemoryTestData.results)
        self.assertEqual(parser_tests['memory-test']['metrics']['JSHeap'], MemoryTestData.js_heap_results)
        self.assertEqual(parser_tests['memory-test']['metrics']['Malloc'], MemoryTestData.malloc_results)
| 487 |
    def _test_run_with_json_output(self, runner, filesystem, upload_succeeds=False, results_shown=True, expected_exit_code=0, repeat=1, compare_logs=True):
        """Drive runner.run() over two canned tests and verify the exit code,
        the normalized log output, and whether an upload was attempted.
        Returns the captured log text for further assertions."""
        filesystem.write_text_file(runner._base_path + '/inspector/pass.html', 'some content')
        filesystem.write_text_file(runner._base_path + '/Bindings/event-target-wrapper.html', 'some content')

        # One-element list so the closure below can write to it (py2 has no
        # 'nonlocal').
        uploaded = [False]

        def mock_upload_json(hostname, json_path, host_path=None):
            # FIXME: Get rid of the hard-coded perf.webkit.org once we've completed the transition.
            self.assertIn(hostname, ['some.host'])
            self.assertIn(json_path, ['/mock-checkout/output.json'])
            self.assertIn(host_path, [None, '/api/report'])
            uploaded[0] = upload_succeeds
            return upload_succeeds

        runner._upload_json = mock_upload_json
        # Pin timestamps so buildTime in the generated JSON is deterministic.
        runner._timestamp = 123456789
        runner._utc_timestamp = datetime.datetime(2013, 2, 8, 15, 19, 37, 460000)
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            self.assertEqual(runner.run(), expected_exit_code)
        finally:
            stdout, stderr, logs = output_capture.restore_output()

        if not expected_exit_code and compare_logs:
            expected_logs = ''
            for i in xrange(repeat):
                runs = ' (Run %d of %d)' % (i + 1, repeat) if repeat > 1 else ''
                expected_logs += 'Running 2 tests%s\n' % runs + EventTargetWrapperTestData.output + InspectorPassTestData.output
            if results_shown:
                expected_logs += 'MOCK: user.open_url: file://...\n'
            self.assertEqual(self._normalize_output(logs), expected_logs)

        self.assertEqual(uploaded[0], upload_succeeds)

        return logs
| 524 |
    # Expected 'tests' payload for the two canned tests driven by
    # _test_run_with_json_output. Note: despite the name, only the Bindings
    # test contributes an entry here.
    _event_target_wrapper_and_inspector_results = {
        "Bindings":
            {"url": "http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings",
             "tests": {"event-target-wrapper": EventTargetWrapperTestData.results}}}

    def test_run_with_json_output(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

        # Both the JSON and the companion .html results page must be written.
        filesystem = port.host.filesystem
        self.assertTrue(filesystem.isfile(runner._output_json_path()))
        self.assertTrue(filesystem.isfile(filesystem.splitext(runner._output_json_path())[0] + '.html'))

    def test_run_with_description(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--description', 'some description'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "description": "some description",
            "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
| 550 |
| 551 def create_runner_and_setup_results_template(self, args=[]): |
| 552 runner, port = self.create_runner(args) |
| 553 filesystem = port.host.filesystem |
| 554 filesystem.write_text_file(runner._base_path + '/resources/results-templ
ate.html', |
| 555 'BEGIN<script src="%AbsolutePathToWebKitTrunk%/some.js"></script>' |
| 556 '<script src="%AbsolutePathToWebKitTrunk%/other.js"></script><script
>%PeformanceTestsResultsJSON%</script>END') |
| 557 filesystem.write_text_file(runner._base_path + '/Dromaeo/resources/droma
eo/web/lib/jquery-1.6.4.js', 'jquery content') |
| 558 return runner, port |
| 559 |
    def test_run_respects_no_results(self):
        # With --no-results nothing is uploaded, shown, or written to disk.
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--no-results'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, results_shown=False)
        self.assertFalse(port.host.filesystem.isfile('/mock-checkout/output.json'))

    def test_run_generates_json_by_default(self):
        runner, port = self.create_runner_and_setup_results_template()
        filesystem = port.host.filesystem
        output_json_path = runner._output_json_path()
        results_page_path = filesystem.splitext(output_json_path)[0] + '.html'

        # Sanity check: neither artifact exists before the run.
        self.assertFalse(filesystem.isfile(output_json_path))
        self.assertFalse(filesystem.isfile(results_page_path))

        self._test_run_with_json_output(runner, port.host.filesystem)

        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

        self.assertTrue(filesystem.isfile(output_json_path))
        self.assertTrue(filesystem.isfile(results_page_path))

    def test_run_merges_output_by_default(self):
        runner, port = self.create_runner_and_setup_results_template()
        filesystem = port.host.filesystem
        output_json_path = runner._output_json_path()

        # Pre-existing results should be preserved and appended to.
        filesystem.write_text_file(output_json_path, '[{"previous": "results"}]')

        self._test_run_with_json_output(runner, port.host.filesystem)

        self.assertEqual(self._load_output_json(runner), [{"previous": "results"}, {
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
        self.assertTrue(filesystem.isfile(filesystem.splitext(output_json_path)[0] + '.html'))
| 597 |
| 598 def test_run_respects_reset_results(self): |
| 599 runner, port = self.create_runner_and_setup_results_template(args=["--re
set-results"]) |
| 600 filesystem = port.host.filesystem |
| 601 output_json_path = runner._output_json_path() |
| 602 |
| 603 filesystem.write_text_file(output_json_path, '[{"previous": "results"}]'
) |
| 604 |
| 605 self._test_run_with_json_output(runner, port.host.filesystem) |
| 606 |
| 607 self.assertEqual(self._load_output_json(runner), [{ |
| 608 "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_targ
et_wrapper_and_inspector_results, |
| 609 "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "r
evision": "5678"}}}]) |
| 610 self.assertTrue(filesystem.isfile(filesystem.splitext(output_json_path)[
0] + '.html')) |
| 611 pass |
| 612 |
    def test_run_generates_and_show_results_page(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        page_shown = []
        port.show_results_html_file = lambda path: page_shown.append(path)
        filesystem = port.host.filesystem
        self._test_run_with_json_output(runner, filesystem, results_shown=False)

        expected_entry = {"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}

        self.maxDiff = None
        self.assertEqual(runner._output_json_path(), '/mock-checkout/output.json')
        self.assertEqual(self._load_output_json(runner), [expected_entry])
        # The template placeholders must be substituted with the checkout path
        # and the serialized results JSON.
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
            '<script>%s</script>END' % port.host.filesystem.read_text_file(runner._output_json_path()))
        self.assertEqual(page_shown[0], '/mock-checkout/output.html')

        # A second run appends a second entry and regenerates the page.
        self._test_run_with_json_output(runner, filesystem, results_shown=False)
        self.assertEqual(runner._output_json_path(), '/mock-checkout/output.json')
        self.assertEqual(self._load_output_json(runner), [expected_entry, expected_entry])
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
            '<script>%s</script>END' % port.host.filesystem.read_text_file(runner._output_json_path()))

    def test_run_respects_no_show_results(self):
        # The lambda late-binds 'page_shown', so rebinding page_shown below
        # redirects where paths get recorded for each sub-case.
        show_results_html_file = lambda path: page_shown.append(path)

        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        page_shown = []
        port.show_results_html_file = show_results_html_file
        self._test_run_with_json_output(runner, port.host.filesystem, results_shown=False)
        self.assertEqual(page_shown[0], '/mock-checkout/output.html')

        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--no-show-results'])
        page_shown = []
        port.show_results_html_file = show_results_html_file
        self._test_run_with_json_output(runner, port.host.filesystem, results_shown=False)
        self.assertEqual(page_shown, [])

    def test_run_with_bad_output_json(self):
        # Unparseable or non-list existing JSON must abort with BAD_MERGE.
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        port.host.filesystem.write_text_file('/mock-checkout/output.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)
        port.host.filesystem.write_text_file('/mock-checkout/output.json', '{"another bad json": "1"}')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)

    def test_run_with_slave_config_json(self):
        # Keys from the slave config file appear in the output prefixed with
        # "builder" (here: "key" -> "builderKey").
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--slave-config-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '{"key": "value"}')
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}, "builderKey": "value"}])

    def test_run_with_bad_slave_config_json(self):
        # Missing, unparseable, and non-dict slave configs all abort with
        # BAD_SOURCE_JSON.
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--slave-config-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
        logs = self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        self.assertTrue('Missing slave configuration JSON file: /mock-checkout/slave-config.json' in logs)
        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '["another bad json"]')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
| 679 |
    def test_run_with_multiple_repositories(self):
        # Every repository reported by the port gets a revisions entry.
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"webkit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"},
            "some": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

    def test_run_with_upload_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json[0]['platform'], 'platform1')
        self.assertEqual(generated_json[0]['builderName'], 'builder1')
        self.assertEqual(generated_json[0]['buildNumber'], 123)

        # A failed upload must surface as EXIT_CODE_FAILED_UPLOADING.
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, expected_exit_code=PerfTestsRunner.EXIT_CODE_FAILED_UPLOADING)

    def test_run_with_upload_json_should_generate_perf_webkit_json(self):
        # End-to-end check of the perf.webkit.org-style JSON document shape.
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123',
            '--slave-config-json-path=/mock-checkout/slave-config.json'])
        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '{"key": "value1"}')

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
        self.assertTrue(isinstance(generated_json, list))
        self.assertEqual(len(generated_json), 1)

        output = generated_json[0]
        self.maxDiff = None
        self.assertEqual(output['platform'], 'platform1')
        self.assertEqual(output['buildNumber'], 123)
        self.assertEqual(output['buildTime'], '2013-02-08T15:19:37.460000')
        self.assertEqual(output['builderName'], 'builder1')
        self.assertEqual(output['builderKey'], 'value1')
        self.assertEqual(output['revisions'], {'blink': {'revision': '5678', 'timestamp': '2013-02-01 08:48:05 +0000'}})
        self.assertEqual(output['tests'].keys(), ['Bindings'])
        self.assertEqual(sorted(output['tests']['Bindings'].keys()), ['tests', 'url'])
        self.assertEqual(output['tests']['Bindings']['url'], 'http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings')
        self.assertEqual(output['tests']['Bindings']['tests'].keys(), ['event-target-wrapper'])
        self.assertEqual(output['tests']['Bindings']['tests']['event-target-wrapper'], {
            'url': 'http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings/event-target-wrapper.html',
            'metrics': {'Time': {'current': [[1486.0, 1471.0, 1510.0, 1505.0, 1478.0, 1490.0]] * 4}}})
| 728 |
| 729 def test_run_with_repeat(self): |
| 730 self.maxDiff = None |
| 731 runner, port = self.create_runner_and_setup_results_template(args=['--ou
tput-json-path=/mock-checkout/output.json', |
| 732 '--test-results-server=some.host', '--repeat', '5']) |
| 733 self._test_run_with_json_output(runner, port.host.filesystem, upload_suc
ceeds=True, repeat=5) |
| 734 self.assertEqual(self._load_output_json(runner), [ |
| 735 {"buildTime": "2013-02-08T15:19:37.460000", |
| 736 "tests": self._event_target_wrapper_and_inspector_results, |
| 737 "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "r
evision": "5678"}}}, |
| 738 {"buildTime": "2013-02-08T15:19:37.460000", |
| 739 "tests": self._event_target_wrapper_and_inspector_results, |
| 740 "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "r
evision": "5678"}}}, |
| 741 {"buildTime": "2013-02-08T15:19:37.460000", |
| 742 "tests": self._event_target_wrapper_and_inspector_results, |
| 743 "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "r
evision": "5678"}}}, |
| 744 {"buildTime": "2013-02-08T15:19:37.460000", |
| 745 "tests": self._event_target_wrapper_and_inspector_results, |
| 746 "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "r
evision": "5678"}}}, |
| 747 {"buildTime": "2013-02-08T15:19:37.460000", |
| 748 "tests": self._event_target_wrapper_and_inspector_results, |
| 749 "revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "r
evision": "5678"}}}]) |
| 750 |
| 751 def test_run_with_test_runner_count(self): |
| 752 runner, port = self.create_runner_and_setup_results_template(args=['--ou
tput-json-path=/mock-checkout/output.json', |
| 753 '--test-runner-count=3']) |
| 754 self._test_run_with_json_output(runner, port.host.filesystem, compare_lo
gs=False) |
| 755 generated_json = json.loads(port.host.filesystem.files['/mock-checkout/o
utput.json']) |
| 756 self.assertTrue(isinstance(generated_json, list)) |
| 757 self.assertEqual(len(generated_json), 1) |
| 758 |
| 759 output = generated_json[0]['tests']['Bindings']['tests']['event-target-w
rapper']['metrics']['Time']['current'] |
| 760 self.assertEqual(len(output), 3) |
| 761 expectedMetrics = EventTargetWrapperTestData.results['metrics']['Time'][
'current'][0] |
| 762 for metrics in output: |
| 763 self.assertEqual(metrics, expectedMetrics) |
OLD | NEW |