Chromium Code Reviews
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/unit_test_launcher.h"

#include <map>
#include <string>
#include <vector>

#include "base/command_line.h"
#include "base/compiler_specific.h"
#include "base/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/format_macros.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/test/gtest_xml_util.h"
#include "base/test/test_launcher.h"
#include "base/test/test_timeouts.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace base {

namespace {

// This constant controls how many tests are run in a single batch.
const size_t kTestBatchLimit = 10;

// Flag to enable the new launcher logic.
// TODO(phajdan.jr): Remove it, http://crbug.com/236893 .
const char kBraveNewTestLauncherFlag[] = "brave-new-test-launcher";

// Flag to run all tests in a single process.
const char kSingleProcessTestsFlag[] = "single-process-tests";
M-A Ruel
2013/08/15 01:59:14
I feel this flag could be easily confused with --single-process.
Paweł Hajdan Jr.
2013/08/15 21:54:45
Right, I'm trying to at least improve things over the status quo.
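To make the two flags concrete (binary and test names here are hypothetical): running "unit_tests --brave-new-test-launcher" opts into the batching launcher implemented below, while "unit_tests --gtest_filter=FooTest.Bar --single-process-tests" bypasses it and runs the matching test in a single process, e.g. under a debugger.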

// Returns the command line for a child GTest process, based on the command
// line of the current process. |test_names| is a vector of full test names
// (e.g. "A.B"), |output_file| is the path to the GTest XML output file.
CommandLine GetCommandLineForChildGTestProcess(
    const std::vector<std::string>& test_names,
    const base::FilePath& output_file) {
  CommandLine new_cmd_line(CommandLine::ForCurrentProcess()->GetProgram());
  CommandLine::SwitchMap switches =
      CommandLine::ForCurrentProcess()->GetSwitches();

  switches.erase(kGTestOutputFlag);
  new_cmd_line.AppendSwitchPath(
      kGTestOutputFlag,
      base::FilePath(FILE_PATH_LITERAL("xml:") + output_file.value()));

  for (CommandLine::SwitchMap::const_iterator iter = switches.begin();
       iter != switches.end(); ++iter) {
    new_cmd_line.AppendSwitchNative(iter->first, iter->second);
M-A Ruel
2013/08/15 01:59:14
why not iter->first, iter->second?
Paweł Hajdan Jr.
2013/08/15 21:54:45
Done.
  }

  new_cmd_line.AppendSwitchASCII(kGTestFilterFlag,
                                 JoinString(test_names, ":"));
  new_cmd_line.AppendSwitch(kSingleProcessTestsFlag);
  new_cmd_line.AppendSwitch(kBraveNewTestLauncherFlag);

  return new_cmd_line;
}
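As an illustration of the resulting child command line (paths and test names hypothetical): for test_names = {"FooTest.Bar", "FooTest.Baz"} and a parent invocation that carried --v=1, the function above would produce roughly:

  unit_tests --gtest_output=xml:/tmp/scoped_temp_dir/test_results.xml --v=1
      --gtest_filter=FooTest.Bar:FooTest.Baz --single-process-tests
      --brave-new-test-launcher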

class UnitTestLauncherDelegate : public TestLauncherDelegate {
 private:
  struct TestLaunchInfo {
    std::string GetFullName() const {
      return test_case_name + "." + test_name;
    }

    std::string test_case_name;
    std::string test_name;
    TestResultCallback callback;
  };

  virtual bool ShouldRunTest(const testing::TestCase* test_case,
                             const testing::TestInfo* test_info) OVERRIDE {
    // There is no additional logic to disable specific tests.
    return true;
  }

  virtual void RunTest(const testing::TestCase* test_case,
                       const testing::TestInfo* test_info,
                       const TestResultCallback& callback) OVERRIDE {
    TestLaunchInfo launch_info;
    launch_info.test_case_name = test_case->name();
    launch_info.test_name = test_info->name();
    launch_info.callback = callback;
    tests_.push_back(launch_info);

    // Run tests in batches no larger than the limit.
    if (tests_.size() >= kTestBatchLimit)
      RunRemainingTests();
  }
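For example, with kTestBatchLimit at 10, a suite of 25 tests queued through RunTest() would trigger two full batches of 10 here; the remaining 5 are flushed by the final RunRemainingTests() call, which is why the timeout computation below notes that the last batch can be smaller.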

  virtual void RunRemainingTests() OVERRIDE {
    if (tests_.empty())
      return;

    // Create a dedicated temporary directory for each run to ensure clean
M-A Ruel
2013/08/15 01:59:14
// Create a dedicated temporary directory to store the xml result data per run.
Paweł Hajdan Jr.
2013/08/15 21:54:45
Done.
    // state and make it possible to launch multiple processes in parallel.
    base::FilePath output_file;
    base::ScopedTempDir temp_dir;
    // TODO(phajdan.jr): Handle the failure gracefully.
M-A Ruel
2013/08/15 01:59:14
I don't think it's worth it, at that point it could be a CHECK.
Paweł Hajdan Jr.
2013/08/15 21:54:45
Done.
    CHECK(temp_dir.CreateUniqueTempDir());
    output_file = temp_dir.path().AppendASCII("test_results.xml");

    std::vector<std::string> test_names;
    for (size_t i = 0; i < tests_.size(); i++)
      test_names.push_back(tests_[i].GetFullName());

    CommandLine cmd_line(
        GetCommandLineForChildGTestProcess(test_names, output_file));

    // Adjust the timeout depending on how many tests we're running
    // (note that e.g. the last batch of tests will be smaller).
M-A Ruel
2013/08/15 01:59:14
// TODO(phajdan.jr): Use an adaptive timeout based on the number of tests.
Paweł Hajdan Jr.
2013/08/15 21:54:45
Done.
    base::TimeDelta timeout =
        test_names.size() * TestTimeouts::action_timeout();

    // TODO(phajdan.jr): Distinguish between test failures and crashes.
    bool was_timeout = false;
    int exit_code = LaunchChildGTestProcess(cmd_line,
                                            std::string(),
                                            timeout,
                                            &was_timeout);

    ProcessTestResults(output_file, exit_code);

    tests_.clear();
  }
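A worked example of the timeout scaling above, assuming an action timeout of 10 seconds (the actual value comes from TestTimeouts): a full batch of 10 tests gets 100 seconds overall, while a trailing batch of 3 tests gets only 30 seconds, so small batches fail fast instead of waiting out the worst case.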

  void ProcessTestResults(const base::FilePath& output_file, int exit_code) {
    std::vector<TestResult> test_results;
    bool have_test_results = ProcessGTestOutput(output_file, &test_results);

    if (have_test_results) {
      // TODO(phajdan.jr): Check for duplicates and mismatches between
      // the results we got from the XML file and tests we intended to run.
      std::map<std::string, bool> results_map;
      for (size_t i = 0; i < test_results.size(); i++)
        results_map[test_results[i].GetFullName()] = test_results[i].success;

      for (size_t i = 0; i < tests_.size(); i++) {
        TestResult test_result;
        test_result.test_case_name = tests_[i].test_case_name;
        test_result.test_name = tests_[i].test_name;
        test_result.success = results_map[tests_[i].GetFullName()];
        tests_[i].callback.Run(test_result);
      }

      // TODO(phajdan.jr): Handle the case where the exit code is non-zero
      // but the results file indicates that all tests passed (e.g. crash
      // during shutdown).
    } else {
      // We do not have reliable details about test results (parsing test
      // stdout is known to be unreliable); apply the executable exit code
      // to all tests.
      // TODO(phajdan.jr): Be smarter about this, e.g. retry each test
      // individually.
      for (size_t i = 0; i < tests_.size(); i++) {
        TestResult test_result;
        test_result.test_case_name = tests_[i].test_case_name;
        test_result.test_name = tests_[i].test_name;
        test_result.success = (exit_code == 0);
        tests_[i].callback.Run(test_result);
      }
    }
  }
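One subtlety in ProcessTestResults() above: results_map is consulted with operator[], so a test that never shows up in the XML output (for example because the batch crashed before reaching it) gets a value-initialized false and is reported as failed rather than silently passing. A minimal standalone sketch of that behavior (test names hypothetical):

#include <cassert>
#include <map>
#include <string>

int main() {
  std::map<std::string, bool> results_map;
  results_map["FooTest.Bar"] = true;  // recorded in the XML as passed
  // "FooTest.Missing" was never recorded; operator[] inserts false.
  assert(results_map["FooTest.Bar"]);
  assert(!results_map["FooTest.Missing"]);
  return 0;
}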

  std::vector<TestLaunchInfo> tests_;
};

}  // namespace

int LaunchUnitTests(int argc,
                    char** argv,
                    const RunTestSuiteCallback& run_test_suite) {
  CommandLine::Init(argc, argv);
  if (CommandLine::ForCurrentProcess()->HasSwitch(kSingleProcessTestsFlag) ||
      !CommandLine::ForCurrentProcess()->HasSwitch(kBraveNewTestLauncherFlag)) {
    return run_test_suite.Run();
  }

  base::TimeTicks start_time(base::TimeTicks::Now());

  fprintf(stdout,
          "Starting tests...\n"
          "IMPORTANT DEBUGGING NOTE: batches of tests are run inside their own\n"
          "process. For debugging a test inside a debugger, use the\n"
          "--gtest_filter=<your_test_name> flag along with\n"
          "--single-process-tests.\n");
  fflush(stdout);

  testing::InitGoogleTest(&argc, argv);
  TestTimeouts::Initialize();

  base::UnitTestLauncherDelegate delegate;
  int exit_code = base::LaunchTests(&delegate, argc, argv);

  fprintf(stdout,
          "Tests took %" PRId64 " seconds.\n",
          (base::TimeTicks::Now() - start_time).InSeconds());
  fflush(stdout);

  return exit_code;
}

}  // namespace base
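For context, a test binary adopts this launcher by calling LaunchUnitTests() from main(). A minimal sketch, assuming the usual base::TestSuite pattern (the exact boilerplate in callers such as run_all_unittests.cc may differ):

#include "base/bind.h"
#include "base/test/test_suite.h"
#include "base/test/unit_test_launcher.h"

int main(int argc, char** argv) {
  base::TestSuite test_suite(argc, argv);
  // Either runs the suite in this process (--single-process-tests, or the
  // new launcher not enabled) or hands control to the batching launcher.
  return base::LaunchUnitTests(
      argc, argv,
      base::Bind(&base::TestSuite::Run, base::Unretained(&test_suite)));
}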