OLD | NEW |
---|---|
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import logging | 5 import logging |
6 import os | 6 import os |
7 import re | 7 import re |
8 | 8 |
9 from pylib import pexpect | 9 from pylib import pexpect |
10 from pylib import ports | 10 from pylib import ports |
11 from pylib.base import base_test_result | 11 from pylib.base import base_test_result |
12 from pylib.base import base_test_runner | 12 from pylib.base import base_test_runner |
13 from pylib.device import device_errors | 13 from pylib.device import device_errors |
14 from pylib.local import local_test_server_spawner | 14 from pylib.local import local_test_server_spawner |
15 from pylib.perf import perf_control | 15 from pylib.perf import perf_control |
16 | 16 |
17 # Test case statuses. | |
18 g_re_run = re.compile('\\[ RUN \\] ?(.*)\r\n') | |
jbudorick
2015/01/16 14:21:41
Constants should be ALL_CAPS, not g_with_underscores.
Jaekyun Seok (inactive)
2015/01/18 23:01:18
Done.
| |
19 g_re_fail = re.compile('\\[ FAILED \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') | |
20 g_re_ok = re.compile('\\[ OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') | |
21 | |
22 # Test run statuses. | |
23 g_re_passed = re.compile('\\[ PASSED \\] ?(.*)\r\n') | |
24 g_re_runner_fail = re.compile('\\[ RUNNER_FAILED \\] ?(.*)\r\n') | |
25 # Signal handlers are installed before starting tests | |
26 # to output the CRASHED marker when a crash happens. | |
27 g_re_crash = re.compile('\\[ CRASHED \\](.*)\r\n') | |
28 | |
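For reference, a standalone sketch of how these status-line patterns behave, not part of the CL: the test name 'FooTest.Bar' and the 12 ms duration are made up, and real gtest output pads the status tags (e.g. '[ RUN      ]') rather than the single spaces shown in the rendered diff.

import re

# Same patterns as above (g_re_run / g_re_ok); the sample lines are
# hand-written to match the patterns exactly as rendered.
g_re_run = re.compile('\\[ RUN \\] ?(.*)\r\n')
g_re_ok = re.compile('\\[ OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n')

run_line = '[ RUN ] FooTest.Bar\r\n'
ok_line = '[ OK ] FooTest.Bar (12 ms)\r\r\n'

print(g_re_run.match(run_line).group(1))   # FooTest.Bar
m = g_re_ok.match(ok_line)
print(m.group(1), int(m.group(3) or 0))    # FooTest.Bar 12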
17 | 29 |
18 def _TestSuiteRequiresMockTestServer(suite_name): | 30 def _TestSuiteRequiresMockTestServer(suite_name): |
19 """Returns True if the test suite requires mock test server.""" | 31 """Returns True if the test suite requires mock test server.""" |
20 tests_require_net_test_server = ['unit_tests', 'net_unittests', | 32 tests_require_net_test_server = ['unit_tests', 'net_unittests', |
21 'content_unittests', | 33 'content_unittests', |
22 'content_browsertests'] | 34 'content_browsertests'] |
23 return (suite_name in | 35 return (suite_name in |
24 tests_require_net_test_server) | 36 tests_require_net_test_server) |
25 | 37 |
26 def _TestSuiteRequiresHighPerfMode(suite_name): | 38 def _TestSuiteRequiresHighPerfMode(suite_name): |
(...skipping 43 matching lines...) | |
70 """Process the test output. | 82 """Process the test output. |
71 | 83 |
72 Args: | 84 Args: |
73 p: An instance of pexpect spawn class. | 85 p: An instance of pexpect spawn class. |
74 | 86 |
75 Returns: | 87 Returns: |
76 A TestRunResults object. | 88 A TestRunResults object. |
77 """ | 89 """ |
78 results = base_test_result.TestRunResults() | 90 results = base_test_result.TestRunResults() |
79 | 91 |
80 # Test case statuses. | |
81 re_run = re.compile('\\[ RUN \\] ?(.*)\r\n') | |
82 re_fail = re.compile('\\[ FAILED \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') | |
83 re_ok = re.compile('\\[ OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n') | |
84 | |
85 # Test run statuses. | |
86 re_passed = re.compile('\\[ PASSED \\] ?(.*)\r\n') | |
87 re_runner_fail = re.compile('\\[ RUNNER_FAILED \\] ?(.*)\r\n') | |
88 # Signal handlers are installed before starting tests | |
89 # to output the CRASHED marker when a crash happens. | |
90 re_crash = re.compile('\\[ CRASHED \\](.*)\r\n') | |
91 | |
92 log = '' | 92 log = '' |
93 try: | 93 try: |
94 while True: | 94 while True: |
95 full_test_name = None | 95 full_test_name = None |
96 | 96 |
97 found = p.expect([re_run, re_passed, re_runner_fail], | 97 found = p.expect([g_re_run, g_re_passed, g_re_runner_fail], |
98 timeout=self._timeout) | 98 timeout=self._timeout) |
99 if found == 1: # re_passed | 99 if found == 1: # g_re_passed |
100 break | 100 break |
101 elif found == 2: # re_runner_fail | 101 elif found == 2: # g_re_runner_fail |
102 break | 102 break |
103 else: # re_run | 103 else: # g_re_run |
104 full_test_name = p.match.group(1).replace('\r', '') | 104 full_test_name = p.match.group(1).replace('\r', '') |
105 found = p.expect([re_ok, re_fail, re_crash], timeout=self._timeout) | 105 found = p.expect([g_re_ok, g_re_fail, g_re_crash], |
106 timeout=self._timeout) | |
106 log = p.before.replace('\r', '') | 107 log = p.before.replace('\r', '') |
107 if found == 0: # re_ok | 108 if found == 0: # g_re_ok |
108 if full_test_name == p.match.group(1).replace('\r', ''): | 109 if full_test_name == p.match.group(1).replace('\r', ''): |
109 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 | 110 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 |
110 results.AddResult(base_test_result.BaseTestResult( | 111 results.AddResult(base_test_result.BaseTestResult( |
111 full_test_name, base_test_result.ResultType.PASS, | 112 full_test_name, base_test_result.ResultType.PASS, |
112 duration=duration_ms, log=log)) | 113 duration=duration_ms, log=log)) |
113 elif found == 2: # re_crash | 114 elif found == 2: # g_re_crash |
114 results.AddResult(base_test_result.BaseTestResult( | 115 results.AddResult(base_test_result.BaseTestResult( |
115 full_test_name, base_test_result.ResultType.CRASH, | 116 full_test_name, base_test_result.ResultType.CRASH, |
116 log=log)) | 117 log=log)) |
117 break | 118 break |
118 else: # re_fail | 119 else: # g_re_fail |
119 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 | 120 duration_ms = int(p.match.group(3)) if p.match.group(3) else 0 |
120 results.AddResult(base_test_result.BaseTestResult( | 121 results.AddResult(base_test_result.BaseTestResult( |
121 full_test_name, base_test_result.ResultType.FAIL, | 122 full_test_name, base_test_result.ResultType.FAIL, |
122 duration=duration_ms, log=log)) | 123 duration=duration_ms, log=log)) |
123 except pexpect.EOF: | 124 except pexpect.EOF: |
124 logging.error('Test terminated - EOF') | 125 logging.error('Test terminated - EOF') |
125 # We're here because either the device went offline, or the test harness | 126 # We're here because either the device went offline, or the test harness |
126 # crashed without outputting the CRASHED marker (crbug.com/175538). | 127 # crashed without outputting the CRASHED marker (crbug.com/175538). |
127 if not self.device.IsOnline(): | 128 if not self.device.IsOnline(): |
128 raise device_errors.DeviceUnreachableError( | 129 raise device_errors.DeviceUnreachableError( |
(...skipping 58 matching lines...) | |
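For context, a minimal standalone sketch of the pexpect expect-loop idiom that _ProcessOutput is built on, not part of the CL: './some_gtest_binary' and the 60-second timeout are placeholders, and the TestRunResults bookkeeping and duration parsing are elided.

import pexpect

p = pexpect.spawn('./some_gtest_binary')   # placeholder command
try:
  while True:
    found = p.expect([g_re_run, g_re_passed, g_re_runner_fail], timeout=60)
    if found != 0:
      break                                # [ PASSED ] or [ RUNNER_FAILED ]
    test_name = p.match.group(1).replace('\r', '')
    found = p.expect([g_re_ok, g_re_fail, g_re_crash], timeout=60)
    log = p.before.replace('\r', '')
    # found == 0 -> PASS, 1 -> FAIL, 2 -> CRASH; _ProcessOutput records each
    # case in a TestRunResults object with the parsed duration and log.
    print(test_name, found, len(log))
except pexpect.EOF:
  pass                                     # binary exited or device dropped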
187 #override | 188 #override |
188 def TearDown(self): | 189 def TearDown(self): |
189 """Cleans up the test enviroment for the test suite.""" | 190 """Cleans up the test enviroment for the test suite.""" |
190 for s in self._servers: | 191 for s in self._servers: |
191 s.TearDown() | 192 s.TearDown() |
192 if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name): | 193 if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name): |
193 self._perf_controller.SetDefaultPerfMode() | 194 self._perf_controller.SetDefaultPerfMode() |
194 self.test_package.ClearApplicationState(self.device) | 195 self.test_package.ClearApplicationState(self.device) |
195 self.tool.CleanUpEnvironment() | 196 self.tool.CleanUpEnvironment() |
196 super(TestRunner, self).TearDown() | 197 super(TestRunner, self).TearDown() |