OLD | NEW |
1 # Copyright (C) 2013 Google Inc. All rights reserved. | 1 # Copyright (C) 2013 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 26 matching lines...) |
37 from webkitpy.layout_tests.port.test import TestDriver | 37 from webkitpy.layout_tests.port.test import TestDriver |
38 from webkitpy.layout_tests.port.test import TestPort | 38 from webkitpy.layout_tests.port.test import TestPort |
39 from webkitpy.performance_tests.perftest import ChromiumStylePerfTest | 39 from webkitpy.performance_tests.perftest import ChromiumStylePerfTest |
40 from webkitpy.performance_tests.perftest import PerfTest | 40 from webkitpy.performance_tests.perftest import PerfTest |
41 from webkitpy.performance_tests.perftest import PerfTestMetric | 41 from webkitpy.performance_tests.perftest import PerfTestMetric |
42 from webkitpy.performance_tests.perftest import PerfTestFactory | 42 from webkitpy.performance_tests.perftest import PerfTestFactory |
43 from webkitpy.performance_tests.perftest import SingleProcessPerfTest | 43 from webkitpy.performance_tests.perftest import SingleProcessPerfTest |
44 | 44 |
45 | 45 |
46 class MockPort(TestPort): | 46 class MockPort(TestPort): |
| 47 |
47 def __init__(self, custom_run_test=None): | 48 def __init__(self, custom_run_test=None): |
48 super(MockPort, self).__init__(host=MockHost(), custom_run_test=custom_run_test) | 49 super(MockPort, self).__init__(host=MockHost(), custom_run_test=custom_run_test) |
49 | 50 |
50 | 51 |
51 class TestPerfTestMetric(unittest.TestCase): | 52 class TestPerfTestMetric(unittest.TestCase): |
| 53 |
52 def test_init_set_missing_unit(self): | 54 def test_init_set_missing_unit(self): |
53 self.assertEqual(PerfTestMetric('Time', iterations=[1, 2, 3, 4, 5]).unit(), 'ms') | 55 self.assertEqual(PerfTestMetric('Time', iterations=[1, 2, 3, 4, 5]).unit(), 'ms') |
54 self.assertEqual(PerfTestMetric('Malloc', iterations=[1, 2, 3, 4, 5]).unit(), 'bytes') | 56 self.assertEqual(PerfTestMetric('Malloc', iterations=[1, 2, 3, 4, 5]).unit(), 'bytes') |
55 self.assertEqual(PerfTestMetric('JSHeap', iterations=[1, 2, 3, 4, 5]).unit(), 'bytes') | 57 self.assertEqual(PerfTestMetric('JSHeap', iterations=[1, 2, 3, 4, 5]).unit(), 'bytes') |
56 | 58 |
57 def test_init_set_time_metric(self): | 59 def test_init_set_time_metric(self): |
58 self.assertEqual(PerfTestMetric('Time', 'ms').name(), 'Time') | 60 self.assertEqual(PerfTestMetric('Time', 'ms').name(), 'Time') |
59 self.assertEqual(PerfTestMetric('Time', 'fps').name(), 'FrameRate') | 61 self.assertEqual(PerfTestMetric('Time', 'fps').name(), 'FrameRate') |
60 self.assertEqual(PerfTestMetric('Time', 'runs/s').name(), 'Runs') | 62 self.assertEqual(PerfTestMetric('Time', 'runs/s').name(), 'Runs') |
61 | 63 |
(...skipping 21 matching lines...) |
83 self.assertTrue(metric2.has_values()) | 85 self.assertTrue(metric2.has_values()) |
84 self.assertEqual(metric.flattened_iteration_values(), [1, 2]) | 86 self.assertEqual(metric.flattened_iteration_values(), [1, 2]) |
85 self.assertEqual(metric2.flattened_iteration_values(), [3]) | 87 self.assertEqual(metric2.flattened_iteration_values(), [3]) |
86 | 88 |
87 metric.append_group([4, 5]) | 89 metric.append_group([4, 5]) |
88 self.assertEqual(metric.grouped_iteration_values(), [[1], [2], [4, 5]]) | 90 self.assertEqual(metric.grouped_iteration_values(), [[1], [2], [4, 5]]) |
89 self.assertEqual(metric.flattened_iteration_values(), [1, 2, 4, 5]) | 91 self.assertEqual(metric.flattened_iteration_values(), [1, 2, 4, 5]) |
90 | 92 |
91 | 93 |
92 class TestPerfTest(unittest.TestCase): | 94 class TestPerfTest(unittest.TestCase): |
| 95 |
93 def _assert_results_are_correct(self, test, output): | 96 def _assert_results_are_correct(self, test, output): |
94 test.run_single = lambda driver, path, time_out_ms: output | 97 test.run_single = lambda driver, path, time_out_ms: output |
95 self.assertTrue(test._run_with_driver(None, None)) | 98 self.assertTrue(test._run_with_driver(None, None)) |
96 self.assertEqual(test._metrics.keys(), ['Time']) | 99 self.assertEqual(test._metrics.keys(), ['Time']) |
97 self.assertEqual(test._metrics['Time'].flattened_iteration_values(), [1080, 1120, 1095, 1101, 1104]) | 100 self.assertEqual(test._metrics['Time'].flattened_iteration_values(), [1080, 1120, 1095, 1101, 1104]) |
98 | 101 |
99 def test_parse_output(self): | 102 def test_parse_output(self): |
100 output = DriverOutput(""" | 103 output = DriverOutput(""" |
101 Running 20 times | 104 Running 20 times |
102 Ignoring warm-up run (1115) | 105 Ignoring warm-up run (1115) |
(...skipping 66 matching lines...) |
169 test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test') | 172 test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test') |
170 output_with_lines_to_ignore = DriverOutput('', image=None, image_hash=None, audio=None, error=""" | 173 output_with_lines_to_ignore = DriverOutput('', image=None, image_hash=None, audio=None, error=""" |
171 Unknown option: --foo-bar | 174 Unknown option: --foo-bar |
172 Should not be ignored | 175 Should not be ignored |
173 [WARNING:proxy_service.cc] bad moon a-rising | 176 [WARNING:proxy_service.cc] bad moon a-rising |
174 [WARNING:chrome.cc] Something went wrong | 177 [WARNING:chrome.cc] Something went wrong |
175 [INFO:SkFontHost_android.cpp(1158)] Use Test Config File Main /data/local/tmp/drt/android_main_fonts.xml, Fallback /data/local/tmp/drt/android_fallback_fonts.xml, Font Dir /data/local/tmp/drt/fonts/ | 178 [INFO:SkFontHost_android.cpp(1158)] Use Test Config File Main /data/local/tmp/drt/android_main_fonts.xml, Fallback /data/local/tmp/drt/android_fallback_fonts.xml, Font Dir /data/local/tmp/drt/fonts/ |
176 [ERROR:main.cc] The sky has fallen""") | 179 [ERROR:main.cc] The sky has fallen""") |
177 test._filter_output(output_with_lines_to_ignore) | 180 test._filter_output(output_with_lines_to_ignore) |
178 self.assertEqual(output_with_lines_to_ignore.error, | 181 self.assertEqual(output_with_lines_to_ignore.error, |
179 "Should not be ignored\n" | 182 "Should not be ignored\n" |
180 "[WARNING:chrome.cc] Something went wrong\n" | 183 "[WARNING:chrome.cc] Something went wrong\n" |
181 "[ERROR:main.cc] The sky has fallen") | 184 "[ERROR:main.cc] The sky has fallen") |
182 | 185 |
183 def test_parse_output_with_subtests(self): | 186 def test_parse_output_with_subtests(self): |
184 output = DriverOutput(""" | 187 output = DriverOutput(""" |
185 Running 20 times | 188 Running 20 times |
186 some test: [1, 2, 3, 4, 5] | 189 some test: [1, 2, 3, 4, 5] |
187 other test = else: [6, 7, 8, 9, 10] | 190 other test = else: [6, 7, 8, 9, 10] |
188 Ignoring warm-up run (1115) | 191 Ignoring warm-up run (1115) |
189 | 192 |
190 Time: | 193 Time: |
191 values 1080, 1120, 1095, 1101, 1104 ms | 194 values 1080, 1120, 1095, 1101, 1104 ms |
192 avg 1100 ms | 195 avg 1100 ms |
193 median 1101 ms | 196 median 1101 ms |
194 stdev 14.50862 ms | 197 stdev 14.50862 ms |
195 min 1080 ms | 198 min 1080 ms |
196 max 1120 ms | 199 max 1120 ms |
197 """, image=None, image_hash=None, audio=None) | 200 """, image=None, image_hash=None, audio=None) |
198 output_capture = OutputCapture() | 201 output_capture = OutputCapture() |
199 output_capture.capture_output() | 202 output_capture.capture_output() |
200 try: | 203 try: |
201 test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test') | 204 test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test') |
202 self._assert_results_are_correct(test, output) | 205 self._assert_results_are_correct(test, output) |
203 finally: | 206 finally: |
204 actual_stdout, actual_stderr, actual_logs = output_capture.restore_output() | 207 actual_stdout, actual_stderr, actual_logs = output_capture.restore_output() |
205 self.assertEqual(actual_stdout, '') | 208 self.assertEqual(actual_stdout, '') |
206 self.assertEqual(actual_stderr, '') | 209 self.assertEqual(actual_stderr, '') |
207 self.assertEqual(actual_logs, '') | 210 self.assertEqual(actual_logs, '') |
208 | 211 |
209 | 212 |
210 class TestSingleProcessPerfTest(unittest.TestCase): | 213 class TestSingleProcessPerfTest(unittest.TestCase): |
| 214 |
211 def test_use_only_one_process(self): | 215 def test_use_only_one_process(self): |
212 called = [0] | 216 called = [0] |
213 | 217 |
214 def run_single(driver, path, time_out_ms): | 218 def run_single(driver, path, time_out_ms): |
215 called[0] += 1 | 219 called[0] += 1 |
216 return DriverOutput(""" | 220 return DriverOutput(""" |
217 Running 20 times | 221 Running 20 times |
218 Ignoring warm-up run (1115) | 222 Ignoring warm-up run (1115) |
219 | 223 |
220 Time: | 224 Time: |
221 values 1080, 1120, 1095, 1101, 1104 ms | 225 values 1080, 1120, 1095, 1101, 1104 ms |
222 avg 1100 ms | 226 avg 1100 ms |
223 median 1101 ms | 227 median 1101 ms |
224 stdev 14.50862 ms | 228 stdev 14.50862 ms |
225 min 1080 ms | 229 min 1080 ms |
226 max 1120 ms""", image=None, image_hash=None, audio=None) | 230 max 1120 ms""", image=None, image_hash=None, audio=None) |
227 | 231 |
228 test = SingleProcessPerfTest(MockPort(), 'some-test', '/path/some-dir/some-test') | 232 test = SingleProcessPerfTest(MockPort(), 'some-test', '/path/some-dir/some-test') |
229 test.run_single = run_single | 233 test.run_single = run_single |
230 self.assertTrue(test.run(0)) | 234 self.assertTrue(test.run(0)) |
231 self.assertEqual(called[0], 1) | 235 self.assertEqual(called[0], 1) |
232 | 236 |
233 | 237 |
234 class TestPerfTestFactory(unittest.TestCase): | 238 class TestPerfTestFactory(unittest.TestCase): |
| 239 |
235 def test_regular_test(self): | 240 def test_regular_test(self): |
236 test = PerfTestFactory.create_perf_test(MockPort(), 'some-dir/some-test', '/path/some-dir/some-test') | 241 test = PerfTestFactory.create_perf_test(MockPort(), 'some-dir/some-test', '/path/some-dir/some-test') |
237 self.assertEqual(test.__class__, PerfTest) | 242 self.assertEqual(test.__class__, PerfTest) |
238 | 243 |
239 def test_inspector_test(self): | 244 def test_inspector_test(self): |
240 test = PerfTestFactory.create_perf_test(MockPort(), 'inspector/some-test', '/path/inspector/some-test') | 245 test = PerfTestFactory.create_perf_test(MockPort(), 'inspector/some-test', '/path/inspector/some-test') |
241 self.assertEqual(test.__class__, ChromiumStylePerfTest) | 246 self.assertEqual(test.__class__, ChromiumStylePerfTest) |