OLD | NEW |
| (Empty) |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | |
2 # Use of this source code is governed by a BSD-style license that can be | |
3 # found in the LICENSE file. | |
4 | |
5 import logging | |
6 import optparse | |
7 import os | |
8 import random | |
9 import sys | |
10 import time | |
11 | |
12 from telemetry import decorators | |
13 from telemetry.core import exceptions | |
14 from telemetry.core import util | |
15 from telemetry.core import wpr_modes | |
16 from telemetry.page import shared_page_state | |
17 from telemetry.page import page_test | |
18 from telemetry.page.actions import page_action | |
19 from telemetry.results import results_options | |
20 from telemetry.user_story import user_story_filter | |
21 from telemetry.util import cloud_storage | |
22 from telemetry.util import exception_formatter | |
23 from telemetry.value import failure | |
24 from telemetry.value import skip | |
25 | |
26 | |
def AddCommandLineArgs(parser):
  """Registers all page-runner command-line options on |parser|."""
  user_story_filter.UserStoryFilter.AddCommandLineArgs(parser)
  results_options.AddResultsOptions(parser)

  # Options controlling page ordering and repetition.
  repeat_group = optparse.OptionGroup(
      parser, 'Page set ordering and repeat options')
  repeat_group.add_option(
      '--pageset-shuffle', dest='pageset_shuffle', action='store_true',
      help='Shuffle the order of pages within a pageset.')
  repeat_group.add_option(
      '--pageset-shuffle-order-file', default=None,
      dest='pageset_shuffle_order_file',
      help='Filename of an output of a previously run test on the current '
           'pageset. The tests will run in the same order again, overriding '
           'what is specified by --page-repeat and --pageset-repeat.')
  repeat_group.add_option(
      '--page-repeat', type='int', default=1,
      help='Number of times to repeat each individual page '
           'before proceeding with the next page in the pageset.')
  repeat_group.add_option(
      '--pageset-repeat', type='int', default=1,
      help='Number of times to repeat the entire pageset.')
  repeat_group.add_option(
      '--max-failures', type='int', default=None,
      help='Maximum number of test failures before aborting '
           'the run. Defaults to the number specified by the '
           'PageTest.')
  parser.add_option_group(repeat_group)

  # Options controlling Web Page Replay.
  wpr_group = optparse.OptionGroup(parser, 'Web Page Replay options')
  wpr_group.add_option(
      '--use-live-sites', action='store_true', dest='use_live_sites',
      help='Run against live sites and ignore the Web Page Replay archives.')
  parser.add_option_group(wpr_group)

  parser.add_option(
      '-d', '--also-run-disabled-tests', dest='run_disabled_tests',
      default=False, action='store_true',
      help='Ignore @Disabled and @Enabled restrictions.')
63 | |
def ProcessCommandLineArgs(parser, args):
  """Validates parsed command-line |args|, reporting errors via |parser|."""
  user_story_filter.UserStoryFilter.ProcessCommandLineArgs(parser, args)
  results_options.ProcessCommandLineArgs(parser, args)

  # The order file is only meaningful when shuffling is requested.
  if args.pageset_shuffle_order_file and not args.pageset_shuffle:
    parser.error('--pageset-shuffle-order-file requires --pageset-shuffle.')

  # Both repeat counts must be at least 1.
  for flag, value in (('--page-repeat', args.page_repeat),
                      ('--pageset-repeat', args.pageset_repeat)):
    if value < 1:
      parser.error('%s must be a positive integer.' % flag)
76 | |
77 | |
def _RunPageAndHandleExceptionIfNeeded(test, page_set, expectations,
                                       page, results, state):
  """Runs |page| through |state|, translating exceptions into result values.

  Behavior by outcome:
    - 'skip' expectation: records the precomputed skip value and returns.
    - Known test exceptions (Failure, TimeoutException, LoginException,
      ProfilingException): recorded as a FailureValue, or merely printed
      when the page was expected to fail.
    - AppCrashException: handled like a known failure, then |state| is torn
      down; multi-tab tests additionally request the whole run to exit.
    - PageActionNotSupported: recorded as a SkipValue.
    - TestNotSupportedOnPlatformFailure: re-raised to the caller.
    - Any other exception: printed and recorded as a FailureValue.
  state.DidRunPage(results) always runs via the finally clause.
  """
  expectation = None

  def ProcessError():
    # Read-only closure over |expectation|; it is still None if
    # state.WillRunPage raised before the expectation was fetched.
    if expectation == 'fail':
      msg = 'Expected exception while running %s' % page.url
      exception_formatter.PrintFormattedException(msg=msg)
    else:
      # Fix: the previous version also built an unused
      # 'Exception while running %s' message here; the failure is only
      # recorded, never printed, so the dead assignment is removed.
      results.AddValue(failure.FailureValue(page, sys.exc_info()))

  try:
    state.WillRunPage(page, page_set)
    expectation, skip_value = state.GetPageExpectationAndSkipValue(expectations)
    if expectation == 'skip':
      assert skip_value
      results.AddValue(skip_value)
      return
    state.RunPage(results)
  except page_test.TestNotSupportedOnPlatformFailure:
    # Platform-level "not supported" propagates to the caller unchanged.
    raise
  except (page_test.Failure, util.TimeoutException, exceptions.LoginException,
          exceptions.ProfilingException):
    ProcessError()
  except exceptions.AppCrashException:
    ProcessError()
    # The app is gone; tear down so the caller can start from a clean state.
    state.TearDown(results)
    if test.is_multi_tab_test:
      logging.error('Aborting multi-tab test after browser or tab crashed at '
                    'page %s' % page.url)
      test.RequestExit()
    return
  except page_action.PageActionNotSupported as e:
    results.AddValue(skip.SkipValue(page, 'Unsupported page action: %s' % e))
  except Exception:
    exception_formatter.PrintFormattedException(
        msg='Unhandled exception while running %s' % page.url)
    results.AddValue(failure.FailureValue(page, sys.exc_info()))
  else:
    if expectation == 'fail':
      logging.warning('%s was expected to fail, but passed.\n', page.url)
  finally:
    state.DidRunPage(results)
121 | |
122 | |
@decorators.Cache
def _UpdatePageSetArchivesIfChanged(page_set):
  """Fetches stale or missing archive data for |page_set| from Cloud Storage.

  Every serving directory (page-set level plus each file-based page's own
  directory) is scanned for .sha1 checksum files, and the corresponding
  files are downloaded when changed. All data is assumed to be public.
  """
  serving_dirs = page_set.serving_dirs.copy()
  # Fold in the per-page serving directories of file-based pages.
  serving_dirs.update(
      page.serving_dir for page in page_set if page.is_file)
  for directory in serving_dirs:
    # Refuse to scan a filesystem root.
    if os.path.splitdrive(directory)[1] == '/':
      raise ValueError('Trying to serve root directory from HTTP server.')
    for root, _, names in os.walk(directory):
      for name in names:
        base, ext = os.path.splitext(os.path.join(root, name))
        if ext == '.sha1':
          # The download target is the path with the .sha1 suffix stripped.
          cloud_storage.GetIfChanged(base, page_set.bucket)
143 | |
144 | |
def Run(test, page_set, expectations, finder_options, results):
  """Runs a given test against a given page_set with the given options.

  Args:
    test: The test object; validates the page set, gates pages via
        CanRunForPage, and can abort the run via RequestExit/IsExiting.
    page_set: The page set supplying the pages and their WPR archives.
    expectations: Per-page expectations consulted while running each page.
    finder_options: Parsed command-line options (shuffle, repeat counts,
        use_live_sites, max_failures, browser options).
    results: Accumulator receiving a value for every page run or skipped.
  """
  test.ValidatePageSet(page_set)

  # Reorder page set based on options.
  pages = _ShuffleAndFilterPageSet(page_set, finder_options)

  # Unless running against live sites or recording, refresh archives from
  # Cloud Storage and drop pages whose archives are missing.
  if not finder_options.use_live_sites:
    if finder_options.browser_options.wpr_mode != wpr_modes.WPR_RECORD:
      _UpdatePageSetArchivesIfChanged(page_set)
      pages = _CheckArchives(page_set, pages, results)

  # Record a SkipValue for pages the test cannot run; iterate over a copy
  # so removing from |pages| is safe.
  for page in list(pages):
    if not test.CanRunForPage(page):
      results.WillRunPage(page)
      logging.debug('Skipping test: it cannot run for %s', page.url)
      results.AddValue(skip.SkipValue(page, 'Test cannot run'))
      results.DidRunPage(page)
      pages.remove(page)

  if not pages:
    return

  state = shared_page_state.SharedPageState(test, finder_options, page_set)
  # Pages whose first run has already been discarded (discard_first_result).
  pages_with_discarded_first_result = set()
  max_failures = finder_options.max_failures  # command-line gets priority
  if max_failures is None:
    max_failures = test.max_failures  # may be None

  try:
    test.WillRunTest(finder_options)
    # NOTE: xrange — this module targets Python 2.
    for _ in xrange(finder_options.pageset_repeat):
      for page in pages:
        if test.IsExiting():
          break
        for _ in xrange(finder_options.page_repeat):
          results.WillRunPage(page)
          try:
            _WaitForThermalThrottlingIfNeeded(state.platform)
            _RunPageAndHandleExceptionIfNeeded(
                test, page_set, expectations, page, results, state)
          except Exception:
            # Tear down & restart the state for unhandled exceptions thrown by
            # _RunPageAndHandleExceptionIfNeeded.
            results.AddValue(failure.FailureValue(page, sys.exc_info()))
            state.TearDown(results)
            state = shared_page_state.SharedPageState(
                test, finder_options, page_set)
          finally:
            _CheckThermalThrottling(state.platform)
            # Discard this page's first run when the test requests it.
            discard_run = (test.discard_first_result and
                           page not in pages_with_discarded_first_result)
            if discard_run:
              pages_with_discarded_first_result.add(page)
            results.DidRunPage(page, discard_run=discard_run)
          # Stop scheduling new runs once the failure budget is exhausted.
          if max_failures is not None and len(results.failures) > max_failures:
            logging.error('Too many failures. Aborting.')
            test.RequestExit()
  finally:
    state.TearDown(results)
205 | |
206 | |
def _ShuffleAndFilterPageSet(page_set, finder_options):
  """Returns the list of pages to run, ordered per |finder_options|."""
  # A previously recorded order file fully determines the sequence.
  if finder_options.pageset_shuffle_order_file:
    return page_set.ReorderPageSet(finder_options.pageset_shuffle_order_file)
  selected = []
  for page in page_set.pages:
    if user_story_filter.UserStoryFilter.IsSelected(page):
      selected.append(page)
  if finder_options.pageset_shuffle:
    random.shuffle(selected)
  return selected
215 | |
216 | |
217 def _CheckArchives(page_set, pages, results): | |
218 """Returns a subset of pages that are local or have WPR archives. | |
219 | |
220 Logs warnings if any are missing. | |
221 """ | |
222 # Warn of any problems with the entire page set. | |
223 if any(not p.is_local for p in pages): | |
224 if not page_set.archive_data_file: | |
225 logging.warning('The page set is missing an "archive_data_file" ' | |
226 'property. Skipping any live sites. To include them, ' | |
227 'pass the flag --use-live-sites.') | |
228 if not page_set.wpr_archive_info: | |
229 logging.warning('The archive info file is missing. ' | |
230 'To fix this, either add svn-internal to your ' | |
231 '.gclient using http://goto/read-src-internal, ' | |
232 'or create a new archive using record_wpr.') | |
233 | |
234 # Warn of any problems with individual pages and return valid pages. | |
235 pages_missing_archive_path = [] | |
236 pages_missing_archive_data = [] | |
237 valid_pages = [] | |
238 for page in pages: | |
239 if not page.is_local and not page.archive_path: | |
240 pages_missing_archive_path.append(page) | |
241 elif not page.is_local and not os.path.isfile(page.archive_path): | |
242 pages_missing_archive_data.append(page) | |
243 else: | |
244 valid_pages.append(page) | |
245 if pages_missing_archive_path: | |
246 logging.warning('The page set archives for some pages do not exist. ' | |
247 'Skipping those pages. To fix this, record those pages ' | |
248 'using record_wpr. To ignore this warning and run ' | |
249 'against live sites, pass the flag --use-live-sites.') | |
250 if pages_missing_archive_data: | |
251 logging.warning('The page set archives for some pages are missing. ' | |
252 'Someone forgot to check them in, or they were deleted. ' | |
253 'Skipping those pages. To fix this, record those pages ' | |
254 'using record_wpr. To ignore this warning and run ' | |
255 'against live sites, pass the flag --use-live-sites.') | |
256 for page in pages_missing_archive_path + pages_missing_archive_data: | |
257 results.WillRunPage(page) | |
258 results.AddValue(failure.FailureValue.FromMessage( | |
259 page, 'Page set archive doesn\'t exist.')) | |
260 results.DidRunPage(page) | |
261 return valid_pages | |
262 | |
263 | |
264 def _WaitForThermalThrottlingIfNeeded(platform): | |
265 if not platform.CanMonitorThermalThrottling(): | |
266 return | |
267 thermal_throttling_retry = 0 | |
268 while (platform.IsThermallyThrottled() and | |
269 thermal_throttling_retry < 3): | |
270 logging.warning('Thermally throttled, waiting (%d)...', | |
271 thermal_throttling_retry) | |
272 thermal_throttling_retry += 1 | |
273 time.sleep(thermal_throttling_retry * 2) | |
274 | |
275 if thermal_throttling_retry and platform.IsThermallyThrottled(): | |
276 logging.warning('Device is thermally throttled before running ' | |
277 'performance tests, results will vary.') | |
278 | |
279 | |
280 def _CheckThermalThrottling(platform): | |
281 if not platform.CanMonitorThermalThrottling(): | |
282 return | |
283 if platform.HasBeenThermallyThrottled(): | |
284 logging.warning('Device has been thermally throttled during ' | |
285 'performance tests, results will vary.') | |
OLD | NEW |