| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/public/test/test_launcher.h" | 5 #include "content/public/test/test_launcher.h" |
| 6 | 6 |
| 7 #include <string> | 7 #include <string> |
| 8 #include <vector> | 8 #include <vector> |
| 9 | 9 |
| 10 #include "base/command_line.h" | 10 #include "base/command_line.h" |
| 11 #include "base/containers/hash_tables.h" | 11 #include "base/containers/hash_tables.h" |
| 12 #include "base/environment.h" | 12 #include "base/environment.h" |
| 13 #include "base/file_util.h" | 13 #include "base/file_util.h" |
| 14 #include "base/files/scoped_temp_dir.h" | 14 #include "base/files/scoped_temp_dir.h" |
| 15 #include "base/logging.h" | 15 #include "base/logging.h" |
| 16 #include "base/memory/linked_ptr.h" | 16 #include "base/memory/linked_ptr.h" |
| 17 #include "base/memory/scoped_ptr.h" | 17 #include "base/memory/scoped_ptr.h" |
| 18 #include "base/process_util.h" | 18 #include "base/process_util.h" |
| 19 #include "base/strings/string_number_conversions.h" | 19 #include "base/strings/string_number_conversions.h" |
| 20 #include "base/strings/string_util.h" | 20 #include "base/strings/string_util.h" |
| 21 #include "base/strings/utf_string_conversions.h" | 21 #include "base/strings/utf_string_conversions.h" |
| 22 #include "base/test/test_launcher.h" |
| 22 #include "base/test/test_suite.h" | 23 #include "base/test/test_suite.h" |
| 23 #include "base/test/test_timeouts.h" | 24 #include "base/test/test_timeouts.h" |
| 24 #include "base/time.h" | 25 #include "base/time.h" |
| 25 #include "content/public/app/content_main.h" | 26 #include "content/public/app/content_main.h" |
| 26 #include "content/public/app/content_main_delegate.h" | 27 #include "content/public/app/content_main_delegate.h" |
| 27 #include "content/public/app/startup_helper_win.h" | 28 #include "content/public/app/startup_helper_win.h" |
| 28 #include "content/public/common/content_switches.h" | 29 #include "content/public/common/content_switches.h" |
| 29 #include "content/public/common/sandbox_init.h" | 30 #include "content/public/common/sandbox_init.h" |
| 30 #include "content/public/test/browser_test.h" | 31 #include "content/public/test/browser_test.h" |
| 31 #include "net/base/escape.h" | 32 #include "net/base/escape.h" |
| (...skipping 19 matching lines...) |
| 51 | 52 |
| 52 // Manual tests only run when --run-manual is specified. This allows writing | 53 // Manual tests only run when --run-manual is specified. This allows writing |
| 53 // tests that don't run automatically but are still in the same test binary. | 54 // tests that don't run automatically but are still in the same test binary. |
| 54 // This is useful so that a team that wants to run a few tests doesn't have to | 55 // This is useful so that a team that wants to run a few tests doesn't have to |
| 55 // add a new binary that must be compiled on all builds. | 56 // add a new binary that must be compiled on all builds. |
| 56 const char kManualTestPrefix[] = "MANUAL_"; | 57 const char kManualTestPrefix[] = "MANUAL_"; |
| 57 | 58 |
| 58 TestLauncherDelegate* g_launcher_delegate; | 59 TestLauncherDelegate* g_launcher_delegate; |
| 59 } | 60 } |
| 60 | 61 |
| 61 // The environment variable name for the total number of test shards. | |
| 62 const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS"; | |
| 63 // The environment variable name for the test shard index. | |
| 64 const char kTestShardIndex[] = "GTEST_SHARD_INDEX"; | |
| 65 | |
| 66 // The default output file for XML output. | |
| 67 const base::FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL( | |
| 68 "test_detail.xml"); | |
| 69 | |
| 70 // Quit test execution after this number of tests has timed out. | |
| 71 const int kMaxTimeouts = 5; // 45s timeout * (5 + 1) = 270s max run time. | |
| 72 | |
| 73 namespace { | 62 namespace { |
| 74 | 63 |
| 75 // Parses the environment variable var as an Int32. If it is unset, returns | 64 int DoRunTestInternal(const testing::TestCase* test_case, |
| 76 // default_val. If it is set, unsets it then converts it to Int32 before | 65 const std::string& test_name, |
| 77 // returning it. If unsetting or converting to an Int32 fails, print an | 66 CommandLine* command_line, |
| 78 // error and exit with failure. | 67 base::TimeDelta default_timeout, |
| 79 int32 Int32FromEnvOrDie(const char* const var, int32 default_val) { | 68 bool* was_timeout) { |
| 80 scoped_ptr<base::Environment> env(base::Environment::Create()); | |
| 81 std::string str_val; | |
| 82 int32 result; | |
| 83 if (!env->GetVar(var, &str_val)) | |
| 84 return default_val; | |
| 85 if (!env->UnSetVar(var)) { | |
| 86 LOG(ERROR) << "Invalid environment: we could not unset " << var << ".\n"; | |
| 87 exit(EXIT_FAILURE); | |
| 88 } | |
| 89 if (!base::StringToInt(str_val, &result)) { | |
| 90 LOG(ERROR) << "Invalid environment: " << var << " is not an integer.\n"; | |
| 91 exit(EXIT_FAILURE); | |
| 92 } | |
| 93 return result; | |
| 94 } | |
| 95 | |
| 96 // Checks whether sharding is enabled by examining the relevant | |
| 97 // environment variable values. If the variables are present, | |
| 98 // but inconsistent (i.e., shard_index >= total_shards), prints | |
| 99 // an error and exits. | |
| 100 bool ShouldShard(int32* total_shards, int32* shard_index) { | |
| 101 *total_shards = Int32FromEnvOrDie(kTestTotalShards, -1); | |
| 102 *shard_index = Int32FromEnvOrDie(kTestShardIndex, -1); | |
| 103 | |
| 104 if (*total_shards == -1 && *shard_index == -1) { | |
| 105 return false; | |
| 106 } else if (*total_shards == -1 && *shard_index != -1) { | |
| 107 LOG(ERROR) << "Invalid environment variables: you have " | |
| 108 << kTestShardIndex << " = " << *shard_index | |
| 109 << ", but have left " << kTestTotalShards << " unset.\n"; | |
| 110 exit(EXIT_FAILURE); | |
| 111 } else if (*total_shards != -1 && *shard_index == -1) { | |
| 112 LOG(ERROR) << "Invalid environment variables: you have " | |
| 113 << kTestTotalShards << " = " << *total_shards | |
| 114 << ", but have left " << kTestShardIndex << " unset.\n"; | |
| 115 exit(EXIT_FAILURE); | |
| 116 } else if (*shard_index < 0 || *shard_index >= *total_shards) { | |
| 117 LOG(ERROR) << "Invalid environment variables: we require 0 <= " | |
| 118 << kTestShardIndex << " < " << kTestTotalShards | |
| 119 << ", but you have " << kTestShardIndex << "=" << *shard_index | |
| 120 << ", " << kTestTotalShards << "=" << *total_shards << ".\n"; | |
| 121 exit(EXIT_FAILURE); | |
| 122 } | |
| 123 | |
| 124 return *total_shards > 1; | |
| 125 } | |
| 126 | |
| 127 // Given the total number of shards, the shard index, and the test id, returns | |
| 128 // true iff the test should be run on this shard. The test id is some arbitrary | |
| 129 // but unique non-negative integer assigned by this launcher to each test | |
| 130 // method. Assumes that 0 <= shard_index < total_shards, which is first | |
| 131 // verified in ShouldShard(). | |
| 132 bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) { | |
| 133 return (test_id % total_shards) == shard_index; | |
| 134 } | |
| 135 | |
| 136 // A helper class to output results. | |
| 137 // Note: as XML is currently the only format supported by gtest, we don't | |
| 138 // check output format (e.g. "xml:" prefix) here and output an XML file | |
| 139 // unconditionally. | |
| 140 // Note: we don't output per-test-case or total summary info like | |
| 141 // total failed_test_count, disabled_test_count, elapsed_time and so on. | |
| 142 // Only each test (testcase element in the XML) will have the correct | |
| 143 // failed/disabled/elapsed_time information. Each test won't include | |
| 144 // detailed failure messages either. | |
| 145 class ResultsPrinter { | |
| 146 public: | |
| 147 explicit ResultsPrinter(const CommandLine& command_line); | |
| 148 ~ResultsPrinter(); | |
| 149 void OnTestCaseStart(const char* name, int test_count) const; | |
| 150 void OnTestCaseEnd() const; | |
| 151 | |
| 152 void OnTestEnd(const char* name, const char* case_name, bool run, | |
| 153 bool failed, bool failure_ignored, double elapsed_time) const; | |
| 154 private: | |
| 155 FILE* out_; | |
| 156 | |
| 157 DISALLOW_COPY_AND_ASSIGN(ResultsPrinter); | |
| 158 }; | |
| 159 | |
| 160 ResultsPrinter::ResultsPrinter(const CommandLine& command_line) : out_(NULL) { | |
| 161 if (!command_line.HasSwitch(kGTestOutputFlag)) | |
| 162 return; | |
| 163 std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag); | |
| 164 size_t colon_pos = flag.find(':'); | |
| 165 base::FilePath path; | |
| 166 if (colon_pos != std::string::npos) { | |
| 167 base::FilePath flag_path = | |
| 168 command_line.GetSwitchValuePath(kGTestOutputFlag); | |
| 169 base::FilePath::StringType path_string = flag_path.value(); | |
| 170 path = base::FilePath(path_string.substr(colon_pos + 1)); | |
| 171 // If the given path ends with '/', treat it as a directory. | |
| 172 // Note: This does NOT check that a directory (or file) actually exists | |
| 173 // (the behavior is the same as gtest's). | |
| 174 if (path.EndsWithSeparator()) { | |
| 175 base::FilePath executable = command_line.GetProgram().BaseName(); | |
| 176 path = path.Append(executable.ReplaceExtension( | |
| 177 base::FilePath::StringType(FILE_PATH_LITERAL("xml")))); | |
| 178 } | |
| 179 } | |
| 180 if (path.value().empty()) | |
| 181 path = base::FilePath(kDefaultOutputFile); | |
| 182 base::FilePath dir_name = path.DirName(); | |
| 183 if (!file_util::DirectoryExists(dir_name)) { | |
| 184 LOG(WARNING) << "The output directory does not exist. " | |
| 185 << "Creating the directory: " << dir_name.value(); | |
| 186 // Create the directory if necessary (because gtest does the same). | |
| 187 file_util::CreateDirectory(dir_name); | |
| 188 } | |
| 189 out_ = file_util::OpenFile(path, "w"); | |
| 190 if (!out_) { | |
| 191 LOG(ERROR) << "Cannot open output file: " | |
| 192 << path.value() << "."; | |
| 193 return; | |
| 194 } | |
| 195 fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"); | |
| 196 fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\"" | |
| 197 " disabled=\"\" errors=\"\" time=\"\">\n"); | |
| 198 } | |
| 199 | |
| 200 ResultsPrinter::~ResultsPrinter() { | |
| 201 if (!out_) | |
| 202 return; | |
| 203 fprintf(out_, "</testsuites>\n"); | |
| 204 fclose(out_); | |
| 205 } | |
| 206 | |
| 207 void ResultsPrinter::OnTestCaseStart(const char* name, int test_count) const { | |
| 208 if (!out_) | |
| 209 return; | |
| 210 fprintf(out_, " <testsuite name=\"%s\" tests=\"%d\" failures=\"\"" | |
| 211 " disabled=\"\" errors=\"\" time=\"\">\n", name, test_count); | |
| 212 } | |
| 213 | |
| 214 void ResultsPrinter::OnTestCaseEnd() const { | |
| 215 if (!out_) | |
| 216 return; | |
| 217 fprintf(out_, " </testsuite>\n"); | |
| 218 } | |
| 219 | |
| 220 void ResultsPrinter::OnTestEnd(const char* name, | |
| 221 const char* case_name, | |
| 222 bool run, | |
| 223 bool failed, | |
| 224 bool failure_ignored, | |
| 225 double elapsed_time) const { | |
| 226 if (!out_) | |
| 227 return; | |
| 228 fprintf(out_, " <testcase name=\"%s\" status=\"%s\" time=\"%.3f\"" | |
| 229 " classname=\"%s\"", | |
| 230 name, run ? "run" : "notrun", elapsed_time / 1000.0, case_name); | |
| 231 if (!failed) { | |
| 232 fprintf(out_, " />\n"); | |
| 233 return; | |
| 234 } | |
| 235 fprintf(out_, ">\n"); | |
| 236 fprintf(out_, " <failure message=\"\" type=\"\"%s></failure>\n", | |
| 237 failure_ignored ? " ignored=\"true\"" : ""); | |
| 238 fprintf(out_, " </testcase>\n"); | |
| 239 } | |
| 240 | |
| 241 class TestCasePrinterHelper { | |
| 242 public: | |
| 243 TestCasePrinterHelper(const ResultsPrinter& printer, | |
| 244 const char* name, | |
| 245 int total_test_count) | |
| 246 : printer_(printer) { | |
| 247 printer_.OnTestCaseStart(name, total_test_count); | |
| 248 } | |
| 249 ~TestCasePrinterHelper() { | |
| 250 printer_.OnTestCaseEnd(); | |
| 251 } | |
| 252 private: | |
| 253 const ResultsPrinter& printer_; | |
| 254 | |
| 255 DISALLOW_COPY_AND_ASSIGN(TestCasePrinterHelper); | |
| 256 }; | |
| 257 | |
| 258 // For basic pattern matching of gtest_filter options. (Copied from | |
| 259 // gtest.cc, see the comment below and http://crbug.com/44497) | |
| 260 bool PatternMatchesString(const char* pattern, const char* str) { | |
| 261 switch (*pattern) { | |
| 262 case '\0': | |
| 263 case ':': // Either ':' or '\0' marks the end of the pattern. | |
| 264 return *str == '\0'; | |
| 265 case '?': // Matches any single character. | |
| 266 return *str != '\0' && PatternMatchesString(pattern + 1, str + 1); | |
| 267 case '*': // Matches any string (possibly empty) of characters. | |
| 268 return (*str != '\0' && PatternMatchesString(pattern, str + 1)) || | |
| 269 PatternMatchesString(pattern + 1, str); | |
| 270 default: // Non-special character. Matches itself. | |
| 271 return *pattern == *str && | |
| 272 PatternMatchesString(pattern + 1, str + 1); | |
| 273 } | |
| 274 } | |
| 275 | |
| 276 // TODO(phajdan.jr): Avoid duplicating gtest code. (http://crbug.com/44497) | |
| 277 // For basic pattern matching for gtest_filter options. (Copied from | |
| 278 // gtest.cc) | |
| 279 bool MatchesFilter(const std::string& name, const std::string& filter) { | |
| 280 const char *cur_pattern = filter.c_str(); | |
| 281 for (;;) { | |
| 282 if (PatternMatchesString(cur_pattern, name.c_str())) { | |
| 283 return true; | |
| 284 } | |
| 285 | |
| 286 // Finds the next pattern in the filter. | |
| 287 cur_pattern = strchr(cur_pattern, ':'); | |
| 288 | |
| 289 // Returns if no more pattern can be found. | |
| 290 if (cur_pattern == NULL) { | |
| 291 return false; | |
| 292 } | |
| 293 | |
| 294 // Skips the pattern separator (the ':' character). | |
| 295 cur_pattern++; | |
| 296 } | |
| 297 } | |
| 298 | |
| 299 int RunTestInternal(const testing::TestCase* test_case, | |
| 300 const std::string& test_name, | |
| 301 CommandLine* command_line, | |
| 302 base::TimeDelta default_timeout, | |
| 303 bool* was_timeout) { | |
| 304 if (test_case) { | 69 if (test_case) { |
| 305 std::string pre_test_name = test_name; | 70 std::string pre_test_name = test_name; |
| 306 std::string replace_string = std::string(".") + kPreTestPrefix; | 71 std::string replace_string = std::string(".") + kPreTestPrefix; |
| 307 ReplaceFirstSubstringAfterOffset(&pre_test_name, 0, ".", replace_string); | 72 ReplaceFirstSubstringAfterOffset(&pre_test_name, 0, ".", replace_string); |
| 308 for (int i = 0; i < test_case->total_test_count(); ++i) { | 73 for (int i = 0; i < test_case->total_test_count(); ++i) { |
| 309 const testing::TestInfo* test_info = test_case->GetTestInfo(i); | 74 const testing::TestInfo* test_info = test_case->GetTestInfo(i); |
| 310 std::string cur_test_name = test_info->test_case_name(); | 75 std::string cur_test_name = test_info->test_case_name(); |
| 311 cur_test_name.append("."); | 76 cur_test_name.append("."); |
| 312 cur_test_name.append(test_info->name()); | 77 cur_test_name.append(test_info->name()); |
| 313 if (cur_test_name == pre_test_name) { | 78 if (cur_test_name == pre_test_name) { |
| 314 int exit_code = RunTestInternal(test_case, pre_test_name, command_line, | 79 int exit_code = DoRunTestInternal(test_case, |
| 315 default_timeout, was_timeout); | 80 pre_test_name, |
| 81 command_line, |
| 82 default_timeout, |
| 83 was_timeout); |
| 316 if (exit_code != 0) | 84 if (exit_code != 0) |
| 317 return exit_code; | 85 return exit_code; |
| 318 } | 86 } |
| 319 } | 87 } |
| 320 } | 88 } |
| 321 | 89 |
| 322 CommandLine new_cmd_line(*command_line); | 90 CommandLine new_cmd_line(*command_line); |
| 323 | 91 |
| 324 // Always enable disabled tests. This method is not called with disabled | 92 // Always enable disabled tests. This method is not called with disabled |
| 325 // tests unless this flag was specified to the browser test executable. | 93 // tests unless this flag was specified to the browser test executable. |
| (...skipping 51 matching lines...) |
| 377 } | 145 } |
| 378 #endif | 146 #endif |
| 379 | 147 |
| 380 base::CloseProcessHandle(process_handle); | 148 base::CloseProcessHandle(process_handle); |
| 381 | 149 |
| 382 return exit_code; | 150 return exit_code; |
| 383 } | 151 } |
| 384 | 152 |
| 385 // Runs test specified by |test_name| in a child process, | 153 // Runs test specified by |test_name| in a child process, |
| 386 // and returns the exit code. | 154 // and returns the exit code. |
| 387 int RunTest(TestLauncherDelegate* launcher_delegate, | 155 int DoRunTest(TestLauncherDelegate* launcher_delegate, |
| 388 const testing::TestCase* test_case, | 156 const testing::TestCase* test_case, |
| 389 const std::string& test_name, | 157 const std::string& test_name, |
| 390 base::TimeDelta default_timeout, | 158 base::TimeDelta default_timeout, |
| 391 bool* was_timeout) { | 159 bool* was_timeout) { |
| 392 if (was_timeout) | 160 if (was_timeout) |
| 393 *was_timeout = false; | 161 *was_timeout = false; |
| 394 | 162 |
| 395 #if defined(OS_MACOSX) | 163 #if defined(OS_MACOSX) |
| 396 // Some of the below method calls will leak objects if there is no | 164 // Some of the below method calls will leak objects if there is no |
| 397 // autorelease pool in place. | 165 // autorelease pool in place. |
| 398 base::mac::ScopedNSAutoreleasePool pool; | 166 base::mac::ScopedNSAutoreleasePool pool; |
| 399 #endif | 167 #endif |
| 400 | 168 |
| 401 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); | 169 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
| 402 CommandLine new_cmd_line(cmd_line->GetProgram()); | 170 CommandLine new_cmd_line(cmd_line->GetProgram()); |
| 403 CommandLine::SwitchMap switches = cmd_line->GetSwitches(); | 171 CommandLine::SwitchMap switches = cmd_line->GetSwitches(); |
| 404 | 172 |
| 405 // Strip out gtest_output flag because otherwise we would overwrite results | 173 // Strip out gtest_output flag because otherwise we would overwrite results |
| 406 // of the previous test. We will generate the final output file later | 174 // of the previous test. We will generate the final output file later |
| 407 // in RunTests(). | 175 // in RunTests(). |
| 408 switches.erase(kGTestOutputFlag); | 176 switches.erase(base::kGTestOutputFlag); |
| 409 | 177 |
| 410 // Strip out gtest_repeat flag because we can only run one test in the child | 178 // Strip out gtest_repeat flag because we can only run one test in the child |
| 411 // process (restarting the browser in the same process is illegal after it | 179 // process (restarting the browser in the same process is illegal after it |
| 412 // has been shut down and will actually crash). | 180 // has been shut down and will actually crash). |
| 413 switches.erase(kGTestRepeatFlag); | 181 switches.erase(base::kGTestRepeatFlag); |
| 414 | 182 |
| 415 for (CommandLine::SwitchMap::const_iterator iter = switches.begin(); | 183 for (CommandLine::SwitchMap::const_iterator iter = switches.begin(); |
| 416 iter != switches.end(); ++iter) { | 184 iter != switches.end(); ++iter) { |
| 417 new_cmd_line.AppendSwitchNative((*iter).first, (*iter).second); | 185 new_cmd_line.AppendSwitchNative((*iter).first, (*iter).second); |
| 418 } | 186 } |
| 419 | 187 |
| 420 base::ScopedTempDir temp_dir; | 188 base::ScopedTempDir temp_dir; |
| 421 // Create a new data dir and pass it to the child. | 189 // Create a new data dir and pass it to the child. |
| 422 if (!temp_dir.CreateUniqueTempDir() || !temp_dir.IsValid()) { | 190 if (!temp_dir.CreateUniqueTempDir() || !temp_dir.IsValid()) { |
| 423 LOG(ERROR) << "Error creating temp data directory"; | 191 LOG(ERROR) << "Error creating temp data directory"; |
| 424 return -1; | 192 return -1; |
| 425 } | 193 } |
| 426 | 194 |
| 427 if (!launcher_delegate->AdjustChildProcessCommandLine(&new_cmd_line, | 195 if (!launcher_delegate->AdjustChildProcessCommandLine(&new_cmd_line, |
| 428 temp_dir.path())) { | 196 temp_dir.path())) { |
| 429 return -1; | 197 return -1; |
| 430 } | 198 } |
| 431 | 199 |
| 432 return RunTestInternal( | 200 return DoRunTestInternal( |
| 433 test_case, test_name, &new_cmd_line, default_timeout, was_timeout); | 201 test_case, test_name, &new_cmd_line, default_timeout, was_timeout); |
| 434 } | 202 } |
| 435 | 203 |
| 436 bool RunTests(TestLauncherDelegate* launcher_delegate, | |
| 437 bool should_shard, | |
| 438 int total_shards, | |
| 439 int shard_index) { | |
| 440 const CommandLine* command_line = CommandLine::ForCurrentProcess(); | |
| 441 | |
| 442 DCHECK(!command_line->HasSwitch(kGTestListTestsFlag)); | |
| 443 | |
| 444 testing::UnitTest* const unit_test = testing::UnitTest::GetInstance(); | |
| 445 | |
| 446 std::string filter = command_line->GetSwitchValueASCII(kGTestFilterFlag); | |
| 447 | |
| 448 // Split --gtest_filter at '-', if there is one, to separate into | |
| 449 // positive filter and negative filter portions. | |
| 450 std::string positive_filter = filter; | |
| 451 std::string negative_filter; | |
| 452 size_t dash_pos = filter.find('-'); | |
| 453 if (dash_pos != std::string::npos) { | |
| 454 positive_filter = filter.substr(0, dash_pos); // Everything up to the dash. | |
| 455 negative_filter = filter.substr(dash_pos + 1); // Everything after the dash. | |
| 456 } | |
| 457 | |
| 458 int num_runnable_tests = 0; | |
| 459 int test_run_count = 0; | |
| 460 int timeout_count = 0; | |
| 461 std::vector<std::string> failed_tests; | |
| 462 std::set<std::string> ignored_tests; | |
| 463 | |
| 464 ResultsPrinter printer(*command_line); | |
| 465 for (int i = 0; i < unit_test->total_test_case_count(); ++i) { | |
| 466 const testing::TestCase* test_case = unit_test->GetTestCase(i); | |
| 467 TestCasePrinterHelper helper(printer, test_case->name(), | |
| 468 test_case->total_test_count()); | |
| 469 for (int j = 0; j < test_case->total_test_count(); ++j) { | |
| 470 const testing::TestInfo* test_info = test_case->GetTestInfo(j); | |
| 471 std::string test_name = test_info->test_case_name(); | |
| 472 test_name.append("."); | |
| 473 test_name.append(test_info->name()); | |
| 474 | |
| 475 // Skip our special test so it's not run twice; running it twice confuses | |
| 476 // the log parser. | |
| 477 if (test_name == launcher_delegate->GetEmptyTestName()) | |
| 478 continue; | |
| 479 | |
| 480 // Skip disabled tests. | |
| 481 if (test_name.find("DISABLED") != std::string::npos && | |
| 482 !command_line->HasSwitch(kGTestRunDisabledTestsFlag)) { | |
| 483 printer.OnTestEnd(test_info->name(), test_case->name(), | |
| 484 false, false, false, 0); | |
| 485 continue; | |
| 486 } | |
| 487 | |
| 488 if (StartsWithASCII(test_info->name(), kPreTestPrefix, true)) | |
| 489 continue; | |
| 490 | |
| 491 if (StartsWithASCII(test_info->name(), kManualTestPrefix, true) && | |
| 492 !command_line->HasSwitch(kRunManualTestsFlag)) { | |
| 493 continue; | |
| 494 } | |
| 495 | |
| 496 // Skip the test that doesn't match the filter string (if given). | |
| 497 if ((!positive_filter.empty() && | |
| 498 !MatchesFilter(test_name, positive_filter)) || | |
| 499 MatchesFilter(test_name, negative_filter)) { | |
| 500 printer.OnTestEnd(test_info->name(), test_case->name(), | |
| 501 false, false, false, 0); | |
| 502 continue; | |
| 503 } | |
| 504 | |
| 505 // Decide if this test should be run. | |
| 506 bool should_run = true; | |
| 507 if (should_shard) { | |
| 508 should_run = ShouldRunTestOnShard(total_shards, shard_index, | |
| 509 num_runnable_tests); | |
| 510 } | |
| 511 num_runnable_tests += 1; | |
| 512 // If sharding is enabled and the test should not be run, skip it. | |
| 513 if (!should_run) { | |
| 514 continue; | |
| 515 } | |
| 516 | |
| 517 base::TimeTicks start_time = base::TimeTicks::Now(); | |
| 518 ++test_run_count; | |
| 519 bool was_timeout = false; | |
| 520 int exit_code = RunTest(launcher_delegate, | |
| 521 test_case, | |
| 522 test_name, | |
| 523 TestTimeouts::action_max_timeout(), | |
| 524 &was_timeout); | |
| 525 if (exit_code == 0) { | |
| 526 // Test passed. | |
| 527 printer.OnTestEnd( | |
| 528 test_info->name(), test_case->name(), true, false, | |
| 529 false, | |
| 530 (base::TimeTicks::Now() - start_time).InMillisecondsF()); | |
| 531 } else { | |
| 532 failed_tests.push_back(test_name); | |
| 533 | |
| 534 bool ignore_failure = false; | |
| 535 printer.OnTestEnd( | |
| 536 test_info->name(), test_case->name(), true, true, | |
| 537 ignore_failure, | |
| 538 (base::TimeTicks::Now() - start_time).InMillisecondsF()); | |
| 539 if (ignore_failure) | |
| 540 ignored_tests.insert(test_name); | |
| 541 | |
| 542 if (was_timeout) | |
| 543 ++timeout_count; | |
| 544 } | |
| 545 | |
| 546 if (timeout_count > kMaxTimeouts) { | |
| 547 printf("More than %d timeouts, aborting test case\n", kMaxTimeouts); | |
| 548 break; | |
| 549 } | |
| 550 } | |
| 551 if (timeout_count > kMaxTimeouts) { | |
| 552 printf("More than %d timeouts, aborting test\n", kMaxTimeouts); | |
| 553 break; | |
| 554 } | |
| 555 } | |
| 556 | |
| 557 printf("%d test%s run\n", test_run_count, test_run_count != 1 ? "s" : ""); | |
| 558 printf("%d test%s failed (%d ignored)\n", | |
| 559 static_cast<int>(failed_tests.size()), | |
| 560 failed_tests.size() != 1 ? "s" : "", | |
| 561 static_cast<int>(ignored_tests.size())); | |
| 562 if (failed_tests.size() == ignored_tests.size()) | |
| 563 return true; | |
| 564 | |
| 565 printf("Failing tests:\n"); | |
| 566 for (std::vector<std::string>::const_iterator iter = failed_tests.begin(); | |
| 567 iter != failed_tests.end(); ++iter) { | |
| 568 bool is_ignored = ignored_tests.find(*iter) != ignored_tests.end(); | |
| 569 printf("%s%s\n", iter->c_str(), is_ignored ? " (ignored)" : ""); | |
| 570 } | |
| 571 | |
| 572 return false; | |
| 573 } | |
| 574 | |
| 575 void PrintUsage() { | 204 void PrintUsage() { |
| 576 fprintf(stdout, | 205 fprintf(stdout, |
| 577 "Runs tests using the gtest framework, each test being run in its own\n" | 206 "Runs tests using the gtest framework, each test being run in its own\n" |
| 578 "process. Any gtest flags can be specified.\n" | 207 "process. Any gtest flags can be specified.\n" |
| 579 " --single_process\n" | 208 " --single_process\n" |
| 580 " Runs the tests and the launcher in the same process. Useful for \n" | 209 " Runs the tests and the launcher in the same process. Useful for \n" |
| 581 " debugging a specific test in a debugger.\n" | 210 " debugging a specific test in a debugger.\n" |
| 582 " --single-process\n" | 211 " --single-process\n" |
| 583 " Same as above, and also runs Chrome in single-process mode.\n" | 212 " Same as above, and also runs Chrome in single-process mode.\n" |
| 584 " --help\n" | 213 " --help\n" |
| 585 " Shows this message.\n" | 214 " Shows this message.\n" |
| 586 " --gtest_help\n" | 215 " --gtest_help\n" |
| 587 " Shows the gtest help message.\n"); | 216 " Shows the gtest help message.\n"); |
| 588 } | 217 } |
| 589 | 218 |
| 219 // Implementation of base::TestLauncherDelegate. This is also a test launcher, |
| 220 // wrapping a lower-level test launcher with content-specific code. |
| 221 class WrapperTestLauncherDelegate : public base::TestLauncherDelegate { |
| 222 public: |
| 223 explicit WrapperTestLauncherDelegate( |
| 224 content::TestLauncherDelegate* launcher_delegate) |
| 225 : launcher_delegate_(launcher_delegate), |
| 226 timeout_count_(0), |
| 227 printed_timeout_message_(false) { |
| 228 } |
| 229 |
| 230 // base::TestLauncherDelegate: |
| 231 virtual bool ShouldRunTest(const testing::TestCase* test_case, |
| 232 const testing::TestInfo* test_info) OVERRIDE; |
| 233 virtual bool RunTest(const testing::TestCase* test_case, |
| 234 const testing::TestInfo* test_info) OVERRIDE; |
| 235 |
| 236 private: |
| 237 content::TestLauncherDelegate* launcher_delegate_; |
| 238 |
| 239 // Number of times a test timeout occurred. |
| 240 size_t timeout_count_; |
| 241 |
| 242 // True after a message about too many timeouts has been printed, |
| 243 // to avoid doing it more than once. |
| 244 bool printed_timeout_message_; |
| 245 |
| 246 DISALLOW_COPY_AND_ASSIGN(WrapperTestLauncherDelegate); |
| 247 }; |
| 248 |
| 249 bool WrapperTestLauncherDelegate::ShouldRunTest( |
| 250 const testing::TestCase* test_case, |
| 251 const testing::TestInfo* test_info) { |
| 252 std::string test_name = |
| 253 std::string(test_case->name()) + "." + test_info->name(); |
| 254 |
| 255 if (StartsWithASCII(test_info->name(), kPreTestPrefix, true)) |
| 256 return false; |
| 257 |
| 258 if (StartsWithASCII(test_info->name(), kManualTestPrefix, true) && |
| 259 !CommandLine::ForCurrentProcess()->HasSwitch(kRunManualTestsFlag)) { |
| 260 return false; |
| 261 } |
| 262 |
| 263 // Stop test execution after too many timeouts. |
| 264 if (timeout_count_ > 5) { |
| 265 if (!printed_timeout_message_) { |
| 266 printed_timeout_message_ = true; |
| 267 printf("Too many timeouts, aborting test\n"); |
| 268 } |
| 269 return false; |
| 270 } |
| 271 |
| 272 return true; |
| 273 } |
| 274 |
| 275 bool WrapperTestLauncherDelegate::RunTest(const testing::TestCase* test_case, |
| 276 const testing::TestInfo* test_info) { |
| 277 bool was_timeout = false; |
| 278 std::string test_name = |
| 279 std::string(test_case->name()) + "." + test_info->name(); |
| 280 int exit_code = DoRunTest(launcher_delegate_, |
| 281 test_case, |
| 282 test_name, |
| 283 TestTimeouts::action_max_timeout(), |
| 284 &was_timeout); |
| 285 if (was_timeout) |
| 286 timeout_count_++; |
| 287 return exit_code == 0; |
| 288 } |
| 289 |
| 590 } // namespace | 290 } // namespace |
| 591 | 291 |
| 592 // The following is kept for historical reasons (so people who are used to | 292 // The following is kept for historical reasons (so people who are used to |
| 593 // using it don't get surprised). | 293 // using it don't get surprised). |
| 594 const char kChildProcessFlag[] = "child"; | 294 const char kChildProcessFlag[] = "child"; |
| 595 | 295 |
| 596 const char kGTestFilterFlag[] = "gtest_filter"; | |
| 597 const char kGTestHelpFlag[] = "gtest_help"; | 296 const char kGTestHelpFlag[] = "gtest_help"; |
| 598 const char kGTestListTestsFlag[] = "gtest_list_tests"; | |
| 599 const char kGTestRepeatFlag[] = "gtest_repeat"; | |
| 600 const char kGTestRunDisabledTestsFlag[] = "gtest_also_run_disabled_tests"; | |
| 601 const char kGTestOutputFlag[] = "gtest_output"; | |
| 602 | 297 |
| 603 const char kHelpFlag[] = "help"; | 298 const char kHelpFlag[] = "help"; |
| 604 | 299 |
| 605 const char kLaunchAsBrowser[] = "as-browser"; | 300 const char kLaunchAsBrowser[] = "as-browser"; |
| 606 | 301 |
| 607 // See kManualTestPrefix above. | 302 // See kManualTestPrefix above. |
| 608 const char kRunManualTestsFlag[] = "run-manual"; | 303 const char kRunManualTestsFlag[] = "run-manual"; |
| 609 | 304 |
| 610 const char kSingleProcessTestsFlag[] = "single_process"; | 305 const char kSingleProcessTestsFlag[] = "single_process"; |
| 611 | 306 |
| 612 const char kWarmupFlag[] = "warmup"; | |
| 613 | |
| 614 | 307 |
| 615 TestLauncherDelegate::~TestLauncherDelegate() { | 308 TestLauncherDelegate::~TestLauncherDelegate() { |
| 616 } | 309 } |
| 617 | 310 |
| 618 bool ShouldRunContentMain() { | 311 bool ShouldRunContentMain() { |
| 619 #if defined(OS_WIN) || defined(OS_LINUX) | 312 #if defined(OS_WIN) || defined(OS_LINUX) |
| 620 CommandLine* command_line = CommandLine::ForCurrentProcess(); | 313 CommandLine* command_line = CommandLine::ForCurrentProcess(); |
| 621 return command_line->HasSwitch(switches::kProcessType) || | 314 return command_line->HasSwitch(switches::kProcessType) || |
| 622 command_line->HasSwitch(kLaunchAsBrowser); | 315 command_line->HasSwitch(kLaunchAsBrowser); |
| 623 #else | 316 #else |
| (...skipping 30 matching lines...) |
| 654 CommandLine::Init(argc, argv); | 347 CommandLine::Init(argc, argv); |
| 655 const CommandLine* command_line = CommandLine::ForCurrentProcess(); | 348 const CommandLine* command_line = CommandLine::ForCurrentProcess(); |
| 656 | 349 |
| 657 if (command_line->HasSwitch(kHelpFlag)) { | 350 if (command_line->HasSwitch(kHelpFlag)) { |
| 658 PrintUsage(); | 351 PrintUsage(); |
| 659 return 0; | 352 return 0; |
| 660 } | 353 } |
| 661 | 354 |
| 662 if (command_line->HasSwitch(kSingleProcessTestsFlag) || | 355 if (command_line->HasSwitch(kSingleProcessTestsFlag) || |
| 663 (command_line->HasSwitch(switches::kSingleProcess) && | 356 (command_line->HasSwitch(switches::kSingleProcess) && |
| 664 command_line->HasSwitch(kGTestFilterFlag)) || | 357 command_line->HasSwitch(base::kGTestFilterFlag)) || |
| 665 command_line->HasSwitch(kGTestListTestsFlag) || | 358 command_line->HasSwitch(base::kGTestListTestsFlag) || |
| 666 command_line->HasSwitch(kGTestHelpFlag)) { | 359 command_line->HasSwitch(kGTestHelpFlag)) { |
| 667 #if defined(OS_WIN) | 360 #if defined(OS_WIN) |
| 668 if (command_line->HasSwitch(kSingleProcessTestsFlag)) { | 361 if (command_line->HasSwitch(kSingleProcessTestsFlag)) { |
| 669 sandbox::SandboxInterfaceInfo sandbox_info; | 362 sandbox::SandboxInterfaceInfo sandbox_info; |
| 670 InitializeSandboxInfo(&sandbox_info); | 363 InitializeSandboxInfo(&sandbox_info); |
| 671 InitializeSandbox(&sandbox_info); | 364 InitializeSandbox(&sandbox_info); |
| 672 } | 365 } |
| 673 #endif | 366 #endif |
| 674 return launcher_delegate->RunTestSuite(argc, argv); | 367 return launcher_delegate->RunTestSuite(argc, argv); |
| 675 } | 368 } |
| 676 | 369 |
| 677 if (ShouldRunContentMain()) | 370 if (ShouldRunContentMain()) |
| 678 return RunContentMain(argc, argv, launcher_delegate); | 371 return RunContentMain(argc, argv, launcher_delegate); |
| 679 | 372 |
| 680 base::AtExitManager at_exit; | |
| 681 | |
| 682 int32 total_shards; | |
| 683 int32 shard_index; | |
| 684 bool should_shard = ShouldShard(&total_shards, &shard_index); | |
| 685 | |
| 686 fprintf(stdout, | 373 fprintf(stdout, |
| 687 "Starting tests...\n" | 374 "Starting tests...\n" |
| 688 "IMPORTANT DEBUGGING NOTE: each test is run inside its own process.\n" | 375 "IMPORTANT DEBUGGING NOTE: each test is run inside its own process.\n" |
| 689 "For debugging a test inside a debugger, use the\n" | 376 "For debugging a test inside a debugger, use the\n" |
| 690 "--gtest_filter=<your_test_name> flag along with either\n" | 377 "--gtest_filter=<your_test_name> flag along with either\n" |
| 691 "--single_process (to run the test in one launcher/browser process) or\n" | 378 "--single_process (to run the test in one launcher/browser process) or\n" |
| 692 "--single-process (to do the above, and also run Chrome in single-" | 379 "--single-process (to do the above, and also run Chrome in single-" |
| 693 "process mode).\n"); | 380 "process mode).\n"); |
| 694 | 381 |
| 382 base::AtExitManager at_exit; |
| 695 testing::InitGoogleTest(&argc, argv); | 383 testing::InitGoogleTest(&argc, argv); |
| 696 TestTimeouts::Initialize(); | 384 TestTimeouts::Initialize(); |
| 697 int exit_code = 0; | |
| 698 | 385 |
| 699 std::string empty_test = launcher_delegate->GetEmptyTestName(); | 386 WrapperTestLauncherDelegate delegate(launcher_delegate); |
| 700 if (!empty_test.empty()) { | 387 return base::LaunchTests(&delegate, argc, argv); |
| 701 // Make sure the entire browser code is loaded into memory. Reading it | |
| 702 // from disk may be slow on a busy bot, and can easily exceed the default | |
| 703 // timeout causing flaky test failures. Use an empty test that only starts | |
| 704 // and closes a browser with a long timeout to avoid those problems. | |
| 705 // NOTE: We don't do this when specifying a filter because this slows down | |
| 706 // the common case of running one test locally, and also on trybots when | |
| 707 // sharding as this one test runs ~200 times and wastes a few minutes. | |
| 708 bool warmup = command_line->HasSwitch(kWarmupFlag); | |
| 709 bool has_filter = command_line->HasSwitch(kGTestFilterFlag); | |
| 710 if (warmup || (!should_shard && !has_filter)) { | |
| 711 exit_code = RunTest(launcher_delegate, | |
| 712 NULL, | |
| 713 empty_test, | |
| 714 TestTimeouts::large_test_timeout(), | |
| 715 NULL); | |
| 716 if (exit_code != 0 || warmup) | |
| 717 return exit_code; | |
| 718 } | |
| 719 } | |
| 720 | |
| 721 int cycles = 1; | |
| 722 if (command_line->HasSwitch(kGTestRepeatFlag)) { | |
| 723 base::StringToInt(command_line->GetSwitchValueASCII(kGTestRepeatFlag), | |
| 724 &cycles); | |
| 725 } | |
| 726 | |
| 727 while (cycles != 0) { | |
| 728 if (!RunTests(launcher_delegate, | |
| 729 should_shard, | |
| 730 total_shards, | |
| 731 shard_index)) { | |
| 732 exit_code = 1; | |
| 733 break; | |
| 734 } | |
| 735 | |
| 736 // Special value "-1" means "repeat indefinitely". | |
| 737 if (cycles != -1) | |
| 738 cycles--; | |
| 739 } | |
| 740 return exit_code; | |
| 741 } | 388 } |
| 742 | 389 |
| 743 TestLauncherDelegate* GetCurrentTestLauncherDelegate() { | 390 TestLauncherDelegate* GetCurrentTestLauncherDelegate() { |
| 744 return g_launcher_delegate; | 391 return g_launcher_delegate; |
| 745 } | 392 } |
| 746 | 393 |
| 747 } // namespace content | 394 } // namespace content |
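
Reviewer note on the sharding logic this CL deletes: the old launcher read GTEST_TOTAL_SHARDS and GTEST_SHARD_INDEX and assigned each test to exactly one shard by simple modulo arithmetic (ShouldRunTestOnShard in the OLD column); base::LaunchTests now owns that responsibility. Below is a minimal, self-contained sketch of the same arithmetic. The strict unset/error handling of Int32FromEnvOrDie is simplified away, and the toy main() with ten fake test ids is added purely for illustration.

#include <cstdio>
#include <cstdlib>

// Mirrors the old ShouldRunTestOnShard(): each sequential test id lands on
// exactly one shard, so the shards partition the full test list.
bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
  return (test_id % total_shards) == shard_index;
}

int main() {
  // Simplified: the real launcher also unset these variables and exited
  // with an error on malformed values (see Int32FromEnvOrDie above).
  const char* total = std::getenv("GTEST_TOTAL_SHARDS");
  const char* index = std::getenv("GTEST_SHARD_INDEX");
  int total_shards = total ? std::atoi(total) : 1;
  int shard_index = index ? std::atoi(index) : 0;
  // With 10 tests and 4 shards, shard 0 runs ids 0, 4, 8; shard 1 runs
  // 1, 5, 9; and so on. Every test runs exactly once across all shards.
  for (int test_id = 0; test_id < 10; ++test_id) {
    if (ShouldRunTestOnShard(total_shards, shard_index, test_id))
      std::printf("shard %d runs test %d\n", shard_index, test_id);
  }
  return 0;
}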
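Reviewer note on the filter matching: the PatternMatchesString()/MatchesFilter() pair in the OLD column was copied from gtest.cc (http://crbug.com/44497) and implements the --gtest_filter syntax: '?' matches one character, '*' matches any run, ':' separates alternative patterns, and the caller splits the positive and negative halves at '-' (see RunTests above). A condensed, runnable restatement of the same logic follows; the main() assertions are a test harness added for illustration only.

#include <cassert>
#include <cstring>
#include <string>

bool PatternMatchesString(const char* pattern, const char* str) {
  switch (*pattern) {
    case '\0':
    case ':':  // ':' or '\0' ends the current pattern.
      return *str == '\0';
    case '?':  // Matches any single character.
      return *str != '\0' && PatternMatchesString(pattern + 1, str + 1);
    case '*':  // Matches any (possibly empty) run of characters.
      return (*str != '\0' && PatternMatchesString(pattern, str + 1)) ||
             PatternMatchesString(pattern + 1, str);
    default:   // Literal character: must match exactly.
      return *pattern == *str && PatternMatchesString(pattern + 1, str + 1);
  }
}

bool MatchesFilter(const std::string& name, const std::string& filter) {
  for (const char* cur = filter.c_str(); cur != NULL;) {
    if (PatternMatchesString(cur, name.c_str()))
      return true;
    cur = std::strchr(cur, ':');  // Advance to the next alternative.
    if (cur != NULL)
      ++cur;  // Skip the ':' separator.
  }
  return false;
}

int main() {
  assert(MatchesFilter("FooTest.Bar", "FooTest.*"));
  assert(MatchesFilter("FooTest.Bar", "BazTest.*:FooTest.B?r"));
  assert(!MatchesFilter("FooTest.Bar", "FooTest.Baz"));
  return 0;
}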