| Index: tools/telemetry/telemetry/user_story/user_story_runner.py |
| diff --git a/tools/telemetry/telemetry/user_story/user_story_runner.py b/tools/telemetry/telemetry/user_story/user_story_runner.py |
| index bdc9a40bca57c3849c409e54ce4947fbaff1e866..99ea0315b7cf8c785942067678a136e88f395f1f 100644 |
| --- a/tools/telemetry/telemetry/user_story/user_story_runner.py |
| +++ b/tools/telemetry/telemetry/user_story/user_story_runner.py |
| @@ -201,7 +201,7 @@ def Run(test, user_story_set, expectations, finder_options, results): |
| # patch. |
| isinstance(user_story_set, page_set_module.PageSet)): |
| _UpdateUserStoryArchivesIfChanged(user_story_set) |
| - user_stories = _CheckArchives(user_story_set, user_stories, results) |
| + user_stories = _CheckArchives(user_story_set, user_stories) |
| for user_story in list(user_stories): |
| if not test.CanRunForPage(user_story): |
| @@ -275,51 +275,57 @@ def _ShuffleAndFilterUserStorySet(user_story_set, finder_options): |
| return user_stories |
| -def _CheckArchives(page_set, pages, results): |
| - """Returns a subset of pages that are local or have WPR archives. |
| +def _CheckArchives(page_set, pages): |
|
nednguyen
2014/12/03 19:35:59
I think this _CheckArchives should take in (archiv…
aiolos (Not reviewing)
2014/12/04 00:29:31
Hmm. Good point. We don't actually need to return…
nednguyen
2014/12/04 05:17:31
Are you sure about this?
https://code.google.com/p
aiolos (Not reviewing)
2014/12/04 19:23:31
Hmm. I'd made the change earlier, and had one of t…
nednguyen
2014/12/04 19:38:09
Yes, please.
aiolos (Not reviewing)
2014/12/04 20:13:38
Can I ask why it's better to pass in three attribu…
|
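For readers following the thread above: a minimal sketch, assuming the alternative nednguyen appears to be suggesting, of a _CheckArchives that takes only the attributes it needs instead of the whole page_set. The parameter list is reconstructed from the truncated comments and is an assumption, not the code that landed.

import logging
import os


# Hypothetical signature discussed above; parameter names are assumed.
def _CheckArchives(archive_data_file, wpr_archive_info, pages):
  """Verifies that all pages are local or have WPR archives.

  Logs errors if any are missing and returns an empty list.
  """
  if any(not p.is_local for p in pages):
    if not archive_data_file:
      logging.error('The page set is missing an "archive_data_file" '
                    'property.')
      return []
    if not wpr_archive_info:
      logging.error('The archive info file is missing.')
      return []
  # Per-page checks, condensed from the patch below.
  missing = [p for p in pages
             if not p.is_local and
             (not p.archive_path or not os.path.isfile(p.archive_path))]
  if missing:
    logging.error('Pages without usable archives: %s',
                  ', '.join(p.display_name for p in missing))
    return []
  return pages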
| + """Verifies that all pages are local or have WPR archives. |
| - Logs warnings if any are missing. |
| + Logs errors if any are missing and returns an empty list. |
| """ |
| - # Warn of any problems with the entire page set. |
| + # Report any problems with the entire page set. |
| if any(not p.is_local for p in pages): |
| if not page_set.archive_data_file: |
| - logging.warning('The page set is missing an "archive_data_file" ' |
| - 'property. Skipping any live sites. To include them, ' |
| - 'pass the flag --use-live-sites.') |
| + logging.error('The page set is missing an "archive_data_file" ' |
| + 'property.\nTo run from live sites pass the flag ' |
| + '--use-live-sites.\nTo create an archive file add an ' |
| + 'archive_data_file property to the page set and then ' |
| + 'run record_wpr.') |
| + return [] |
| if not page_set.wpr_archive_info: |
| - logging.warning('The archive info file is missing. ' |
| - 'To fix this, either add svn-internal to your ' |
| - '.gclient using http://goto/read-src-internal, ' |
| - 'or create a new archive using record_wpr.') |
| + logging.error('The archive info file is missing.\n' |
| + 'To fix this, either add svn-internal to your ' |
| + '.gclient using http://goto/read-src-internal, ' |
| + 'or create a new archive using record_wpr.') |
| + return [] |
| - # Warn of any problems with individual pages and return valid pages. |
| + # Report any problems with individual pages. |
| pages_missing_archive_path = [] |
| pages_missing_archive_data = [] |
| - valid_pages = [] |
| for page in pages: |
| if not page.is_local and not page.archive_path: |
| pages_missing_archive_path.append(page) |
| elif not page.is_local and not os.path.isfile(page.archive_path): |
| pages_missing_archive_data.append(page) |
| - else: |
| - valid_pages.append(page) |
| if pages_missing_archive_path: |
| - logging.warning('The page set archives for some pages do not exist. ' |
| - 'Skipping those pages. To fix this, record those pages ' |
| - 'using record_wpr. To ignore this warning and run ' |
| - 'against live sites, pass the flag --use-live-sites.') |
| + logging.error('The page set archives for some pages do not exist.\n' |
| + 'To fix this, record those pages using record_wpr.\n' |
| + 'To ignore this warning and run against live sites, ' |
| + 'pass the flag --use-live-sites.') |
| + logging.error( |
| + 'Pages without archives: %s', |
| + ', '.join(page.display_name for page in pages_missing_archive_path)) |
| if pages_missing_archive_data: |
| - logging.warning('The page set archives for some pages are missing. ' |
| - 'Someone forgot to check them in, or they were deleted. ' |
| - 'Skipping those pages. To fix this, record those pages ' |
| - 'using record_wpr. To ignore this warning and run ' |
| - 'against live sites, pass the flag --use-live-sites.') |
| - for page in pages_missing_archive_path + pages_missing_archive_data: |
| - results.WillRunPage(page) |
| - results.AddValue(failure.FailureValue.FromMessage( |
| - page, 'Page set archive doesn\'t exist.')) |
| - results.DidRunPage(page) |
| - return valid_pages |
| + logging.error('The page set archives for some pages are missing.\n' |
| + 'Someone forgot to check them in, uploaded them to the ' |
| + 'wrong cloud storage bucket, or they were deleted.\n' |
| + 'To fix this, record those pages using record_wpr.\n' |
| + 'To ignore this warning and run against live sites, ' |
| + 'pass the flag --use-live-sites.') |
| + logging.error( |
| + 'Pages missing archives: %s', |
| + ', '.join(page.display_name for page in pages_missing_archive_data)) |
| + if pages_missing_archive_path or pages_missing_archive_data: |
| + return [] |
| + # Run the pages only if there are no problems with the page set or any page. |
|
nednguyen
2014/12/03 19:35:59
Based on this, I think this _Check... can just rais…
aiolos (Not reviewing)
2014/12/04 00:29:31
The output was clearer that it wasn't a failure in…
nednguyen
2014/12/04 05:17:31
If you want this to exit gracefully, you can make…
aiolos (Not reviewing)
2014/12/04 19:23:31
Done.
|
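Following up on the thread above: a minimal sketch of the raise-and-exit-gracefully variant being discussed. The exception name and the caller-side handling are assumptions; the later patch set is not shown here.

import logging


class ArchiveError(Exception):
  """Hypothetical error for missing WPR archives; the name is assumed."""


def _CheckArchives(page_set, pages):
  """Raises ArchiveError when a non-local page lacks a usable archive."""
  if any(not p.is_local for p in pages) and not page_set.archive_data_file:
    raise ArchiveError('The page set is missing an "archive_data_file" '
                       'property.')
  # The remaining checks from the patch would raise in the same way.
  return pages


# Run() could then exit gracefully instead of failing individual pages:
#   try:
#     user_stories = _CheckArchives(user_story_set, user_stories)
#   except ArchiveError as e:
#     logging.error(e)
#     return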
| + return pages |
| def _WaitForThermalThrottlingIfNeeded(platform): |
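For context on the contract in this patch set: the caller in Run() (first hunk above) simply iterates over whatever _CheckArchives returns, so an empty list means no user stories execute and no per-page failures are recorded. A tiny self-contained illustration of that behavior; the stub and names below are placeholders, not Telemetry code.

# Placeholder stub standing in for _CheckArchives.
def _check_archives_stub(pages, archives_ok):
  return pages if archives_ok else []


ran = []
for page in _check_archives_stub(['page1', 'page2'], archives_ok=False):
  ran.append(page)
assert ran == []  # the loop runs zero times, so nothing runs and nothing fails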