// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This is a gtest-based test that runs the Selenium Core test suite in Chrome
// using the UITest automation framework. The total and failed test counts are
// written to stdout.
//
// TODO(darin): output the names of the failed tests so we can easily track
// deviations from the expected output.

#include <list>
#include <set>

#include "base/file_path.h"
#include "base/file_util.h"
#include "base/path_service.h"
#include "base/string_split.h"
#include "base/string_util.h"
#include "base/test/test_timeouts.h"
#include "base/utf_string_conversions.h"
#include "chrome/common/chrome_paths.h"
#include "chrome/test/automation/tab_proxy.h"
#include "chrome/test/automation/window_proxy.h"
#include "chrome/test/ui/ui_test.h"
#include "net/base/net_util.h"

// Uncomment this to exercise this test without actually running the Selenium
// suite, which can take a while. This define is useful when modifying the
// analysis code. It must appear before the #ifdef below so the simulated
// run's include takes effect.
//#define SIMULATE_RUN 1

#ifdef SIMULATE_RUN
#include "base/rand_util.h"
#endif

namespace {

// This file is a comma-separated list of tests that are currently failing.
const char kExpectedFailuresFileName[] = "expected_failures.txt";
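// The entries are expected to be in the same "<number>.<command>" form that
// the harness reports and ParseResults() consumes, e.g. "24.verifyTitle".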

class SeleniumTest : public UITest {
 public:
  SeleniumTest() {
    show_window_ = true;
  }
  typedef std::list<std::string> ResultsList;
  typedef std::set<std::string> ResultsSet;

  // Parses a selenium results string, which is of the form:
  //   "5.selectFrame,6.click,24.selectAndWait,24.verifyTitle"
  void ParseResults(const std::string& input, ResultsSet* output) {
    if (input.empty())
      return;

    std::vector<std::string> tokens;
    base::SplitString(input, ',', &tokens);
    for (size_t i = 0; i < tokens.size(); ++i) {
      TrimWhitespaceASCII(tokens[i], TRIM_ALL, &tokens[i]);
      output->insert(tokens[i]);
    }
  }

  // Find the elements of "b" that are not in "a".
  void CompareSets(const ResultsSet& a, const ResultsSet& b,
                   ResultsList* only_in_b) {
    ResultsSet::const_iterator it = b.begin();
    for (; it != b.end(); ++it) {
      if (a.find(*it) == a.end())
        only_in_b->push_back(*it);
    }
  }

  // The results file is in trunk/chrome/test/selenium/.
  FilePath GetResultsFilePath() {
    FilePath results_path;
    PathService::Get(chrome::DIR_TEST_DATA, &results_path);
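    // DIR_TEST_DATA resolves to <src>/chrome/test/data, so one DirName() call
    // steps up to <src>/chrome/test, where the selenium/ directory lives.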
    results_path = results_path.DirName();
    results_path = results_path.AppendASCII("selenium");
    results_path = results_path.AppendASCII(kExpectedFailuresFileName);
    return results_path;
  }

  bool ReadExpectedResults(std::string* results) {
    FilePath results_path = GetResultsFilePath();
    return file_util::ReadFileToString(results_path, results);
  }

  void RunSelenium(std::wstring* total, std::wstring* failed) {
#ifdef SIMULATE_RUN
    *total = L"100";
    const wchar_t* kBogusFailures[] = {
      L"5.selectFrame,6.click,24.selectAndWait,24.verifyTitle",
      L"5.selectFrame,6.click,13.verifyLocation,13.verifyLocation,13.click,"
      L"24.selectAndWait,24.verifyTitle",
      L"5.selectFrame,6.click,24.selectAndWait"
    };
    *failed = kBogusFailures[base::RandInt(0, 2)];
#else
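    // Real run: load the Selenium Core TestRunner page and read back the
    // cookies it sets ("__tests_finished", "__num_tests_total",
    // "__tests_failed") once the suite completes.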
    FilePath test_path;
    PathService::Get(chrome::DIR_TEST_DATA, &test_path);
    test_path = test_path.DirName();
    test_path = test_path.DirName();
    test_path = test_path.DirName();
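    // Three DirName() calls walk up from <src>/chrome/test/data to <src>, so
    // the suite is loaded from <src>/data/selenium_core/core/TestRunner.html,
    // a data directory that lives outside the chrome/ tree.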
    test_path = test_path.AppendASCII("data");
    test_path = test_path.AppendASCII("selenium_core");
    test_path = test_path.AppendASCII("core");
    test_path = test_path.AppendASCII("TestRunner.html");

    GURL test_url(net::FilePathToFileURL(test_path));
    scoped_refptr<TabProxy> tab(GetActiveTab());
    ASSERT_TRUE(tab.get());
    tab->NavigateToURL(test_url);

    // Wait for the test to finish.
    ASSERT_TRUE(WaitUntilCookieValue(
        tab.get(), test_url, "__tests_finished",
        TestTimeouts::huge_test_timeout_ms(), "1"));

    std::string cookie;
    ASSERT_TRUE(tab->GetCookieByName(test_url, "__num_tests_total", &cookie));
    *total = UTF8ToWide(cookie);
    ASSERT_FALSE(total->empty());
    ASSERT_TRUE(tab->GetCookieByName(test_url, "__tests_failed", &cookie));
    *failed = UTF8ToWide(cookie);
    // The __tests_failed cookie will be empty if all the tests pass.
#endif
  }


  void RunTest(ResultsList* new_passes_list, ResultsList* new_failures_list) {
    std::string expected_failures;
    bool have_expected_results = ReadExpectedResults(&expected_failures);
    ASSERT_TRUE(have_expected_results);

    std::wstring total, failed;
    RunSelenium(&total, &failed);
    if (total.empty())
      return;

    printf("\n");
    wprintf(L"__num_tests_total = [%s]\n", total.c_str());
    wprintf(L"__tests_failed = [%s]\n", failed.c_str());

    std::string cur_failures = WideToUTF8(failed);

    ResultsSet expected_failures_set;
    ParseResults(expected_failures, &expected_failures_set);

    ResultsSet cur_failures_set;
    ParseResults(cur_failures, &cur_failures_set);

    // Compute the new passes (expected failures that no longer fail) and the
    // new failures (current failures that were not in the expected list).
    CompareSets(cur_failures_set, expected_failures_set, new_passes_list);
    CompareSets(expected_failures_set, cur_failures_set, new_failures_list);
  }
};

}  // namespace

TEST_F(SeleniumTest, Core) {
  ResultsList new_passes_list, new_failures_list;
  RunTest(&new_passes_list, &new_failures_list);

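  // Only new failures make the test fail; new passes are reported purely as
  // information so the expected-failures file can be pruned.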
  if (!new_failures_list.empty()) {
    ADD_FAILURE();
    printf("new tests failing:\n");
    ResultsList::const_iterator it = new_failures_list.begin();
    for (; it != new_failures_list.end(); ++it)
      printf("  %s\n", it->c_str());
    printf("\n");
  }

  if (!new_passes_list.empty()) {
    printf("new tests passing:\n");
    ResultsList::const_iterator it = new_passes_list.begin();
    for (; it != new_passes_list.end(); ++it)
      printf("  %s\n", it->c_str());
    printf("\n");
  }
}