| OLD | NEW |
| 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import logging | 5 import logging |
| 6 import os | 6 import os |
| 7 import re | 7 import re |
| 8 | 8 |
| 9 from pylib import pexpect | 9 from pylib import pexpect |
| 10 from pylib import ports | 10 from pylib import ports |
| 11 from pylib.base import base_test_result | 11 from pylib.base import base_test_result |
| 12 from pylib.base import base_test_runner | 12 from pylib.base import base_test_runner |
| 13 from pylib.device import device_errors | 13 from pylib.device import device_errors |
| 14 from pylib.local import local_test_server_spawner | 14 from pylib.local import local_test_server_spawner |
| 15 from pylib.perf import perf_control | 15 from pylib.perf import perf_control |
| 16 | 16 |
| 17 # Test case statuses. |
| 18 RE_RUN = re.compile('\\[ RUN \\] ?(.*)\r\n') |
| 19 RE_FAIL = re.compile('\\[ FAILED \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') |
| 20 RE_OK = re.compile('\\[ OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') |
| 21 |
| 22 # Test run statuses. |
| 23 RE_PASSED = re.compile('\\[ PASSED \\] ?(.*)\r\n') |
| 24 RE_RUNNER_FAIL = re.compile('\\[ RUNNER_FAILED \\] ?(.*)\r\n') |
| 25 # Signal handlers are installed before starting tests |
| 26 # to output the CRASHED marker when a crash happens. |
| 27 RE_CRASH = re.compile('\\[ CRASHED \\](.*)\r\n') |
| 28 |
| 17 | 29 |
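Note: the module-level patterns above parse the gtest console markers emitted by the test binary on the device. A minimal sketch of how RE_OK would be applied to a single output line (the test name, duration, and line padding below are hypothetical, chosen to match the pattern exactly as written):

    import re

    RE_OK = re.compile('\\[ OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n')

    # Hypothetical gtest output line; device output ends in \r\r\n,
    # which is why the pattern expects the doubled carriage return.
    line = '[ OK ] FooTest.Bar (12 ms)\r\r\n'
    m = RE_OK.match(line)
    if m:
        test_name = m.group(1)               # 'FooTest.Bar'
        duration_ms = int(m.group(3) or 0)   # 12, or 0 when no timing is printed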
| 18 def _TestSuiteRequiresMockTestServer(suite_name): | 30 def _TestSuiteRequiresMockTestServer(suite_name): |
| 19 """Returns True if the test suite requires mock test server.""" | 31 """Returns True if the test suite requires mock test server.""" |
| 20 tests_require_net_test_server = ['unit_tests', 'net_unittests', | 32 tests_require_net_test_server = ['unit_tests', 'net_unittests', |
| 21 'content_unittests', | 33 'content_unittests', |
| 22 'content_browsertests'] | 34 'content_browsertests'] |
| 23 return (suite_name in | 35 return (suite_name in |
| 24 tests_require_net_test_server) | 36 tests_require_net_test_server) |
| 25 | 37 |
| 26 def _TestSuiteRequiresHighPerfMode(suite_name): | 38 def _TestSuiteRequiresHighPerfMode(suite_name): |
| (...skipping 43 matching lines...) |
| 70 """Process the test output. | 82 """Process the test output. |
| 71 | 83 |
| 72 Args: | 84 Args: |
| 73 p: An instance of pexpect spawn class. | 85 p: An instance of pexpect spawn class. |
| 74 | 86 |
| 75 Returns: | 87 Returns: |
| 76 A TestRunResults object. | 88 A TestRunResults object. |
| 77 """ | 89 """ |
| 78 results = base_test_result.TestRunResults() | 90 results = base_test_result.TestRunResults() |
| 79 | 91 |
| 80 # Test case statuses. | |
| 81 re_run = re.compile('\\[ RUN \\] ?(.*)\r\n') | |
| 82 re_fail = re.compile('\\[ FAILED \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') | |
| 83 re_ok = re.compile('\\[ OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') | |
| 84 | |
| 85 # Test run statuses. | |
| 86 re_passed = re.compile('\\[ PASSED \\] ?(.*)\r\n') | |
| 87 re_runner_fail = re.compile('\\[ RUNNER_FAILED \\] ?(.*)\r\n') | |
| 88 # Signal handlers are installed before starting tests | |
| 89 # to output the CRASHED marker when a crash happens. | |
| 90 re_crash = re.compile('\\[ CRASHED \\](.*)\r\n') | |
| 91 | |
| 92 log = '' | 92 log = '' |
| 93 try: | 93 try: |
| 94 while True: | 94 while True: |
| 95 full_test_name = None | 95 full_test_name = None |
| 96 | 96 |
| 97 found = p.expect([re_run, re_passed, re_runner_fail], | 97 found = p.expect([RE_RUN, RE_PASSED, RE_RUNNER_FAIL], |
| 98 timeout=self._timeout) | 98 timeout=self._timeout) |
| 99 if found == 1: # re_passed | 99 if found == 1: # RE_PASSED |
| 100 break | 100 break |
| 101 elif found == 2: # re_runner_fail | 101 elif found == 2: # RE_RUNNER_FAIL |
| 102 break | 102 break |
| 103 else: # re_run | 103 else: # RE_RUN |
| 104 full_test_name = p.match.group(1).replace('\r', '') | 104 full_test_name = p.match.group(1).replace('\r', '') |
| 105 found = p.expect([re_ok, re_fail, re_crash], timeout=self._timeout) | 105 found = p.expect([RE_OK, RE_FAIL, RE_CRASH], timeout=self._timeout) |
| 106 log = p.before.replace('\r', '') | 106 log = p.before.replace('\r', '') |
| 107 if found == 0: # re_ok | 107 if found == 0: # RE_OK |
| 108 if full_test_name == p.match.group(1).replace('\r', ''): | 108 if full_test_name == p.match.group(1).replace('\r', ''): |
| 109 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 | 109 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 |
| 110 results.AddResult(base_test_result.BaseTestResult( | 110 results.AddResult(base_test_result.BaseTestResult( |
| 111 full_test_name, base_test_result.ResultType.PASS, | 111 full_test_name, base_test_result.ResultType.PASS, |
| 112 duration=duration_ms, log=log)) | 112 duration=duration_ms, log=log)) |
| 113 elif found == 2: # re_crash | 113 elif found == 2: # RE_CRASH |
| 114 results.AddResult(base_test_result.BaseTestResult( | 114 results.AddResult(base_test_result.BaseTestResult( |
| 115 full_test_name, base_test_result.ResultType.CRASH, | 115 full_test_name, base_test_result.ResultType.CRASH, |
| 116 log=log)) | 116 log=log)) |
| 117 break | 117 break |
| 118 else: # re_fail | 118 else: # RE_FAIL |
| 119 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 | 119 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 |
| 120 results.AddResult(base_test_result.BaseTestResult( | 120 results.AddResult(base_test_result.BaseTestResult( |
| 121 full_test_name, base_test_result.ResultType.FAIL, | 121 full_test_name, base_test_result.ResultType.FAIL, |
| 122 duration=duration_ms, log=log)) | 122 duration=duration_ms, log=log)) |
| 123 except pexpect.EOF: | 123 except pexpect.EOF: |
| 124 logging.error('Test terminated - EOF') | 124 logging.error('Test terminated - EOF') |
| 125 # We're here because either the device went offline, or the test harness | 125 # We're here because either the device went offline, or the test harness |
| 126 # crashed without outputting the CRASHED marker (crbug.com/175538). | 126 # crashed without outputting the CRASHED marker (crbug.com/175538). |
| 127 if not self.device.IsOnline(): | 127 if not self.device.IsOnline(): |
| 128 raise device_errors.DeviceUnreachableError( | 128 raise device_errors.DeviceUnreachableError( |
| (...skipping 58 matching lines...) |
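Note: the loop above relies on pexpect's expect() taking a list of compiled patterns and returning the index of whichever one matched, with p.match holding the match object and p.before holding the output that preceded it. A minimal standalone sketch of that mechanism (the spawned command, timeout, and use of upstream pexpect rather than the pylib copy are illustrative assumptions):

    import re
    import pexpect  # upstream pexpect; pylib ships its own equivalent copy

    RE_OK = re.compile('\\[ OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\n')
    RE_FAIL = re.compile('\\[ FAILED \\] ?(.*?)( \\((\\d+) ms\\))?\r\n')

    child = pexpect.spawn('./some_gtest_binary', encoding='utf-8')  # hypothetical binary
    index = child.expect([RE_OK, RE_FAIL, pexpect.EOF], timeout=60)
    if index == 0:                    # RE_OK matched first
        name = child.match.group(1)   # captured test name
        log = child.before            # everything printed before the marker
    elif index == 1:                  # RE_FAIL matched
        name = child.match.group(1)
    else:                             # EOF: the process exited without a marker
        name = None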
| 187 #override | 187 #override |
| 188 def TearDown(self): | 188 def TearDown(self): |
| 189 """Cleans up the test enviroment for the test suite.""" | 189 """Cleans up the test enviroment for the test suite.""" |
| 190 for s in self._servers: | 190 for s in self._servers: |
| 191 s.TearDown() | 191 s.TearDown() |
| 192 if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name): | 192 if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name): |
| 193 self._perf_controller.SetDefaultPerfMode() | 193 self._perf_controller.SetDefaultPerfMode() |
| 194 self.test_package.ClearApplicationState(self.device) | 194 self.test_package.ClearApplicationState(self.device) |
| 195 self.tool.CleanUpEnvironment() | 195 self.tool.CleanUpEnvironment() |
| 196 super(TestRunner, self).TearDown() | 196 super(TestRunner, self).TearDown() |