Index: base/metrics/field_trial_unittest.cc |
diff --git a/base/metrics/field_trial_unittest.cc b/base/metrics/field_trial_unittest.cc |
index 33570850992ba1d5d916ff8ddfe1591ead3bf74a..c8870a5b8a825189e0f8efa892abe585b0fe1d00 100644 |
--- a/base/metrics/field_trial_unittest.cc |
+++ b/base/metrics/field_trial_unittest.cc |
@@ -6,14 +6,18 @@ |
#include "base/metrics/field_trial.h" |
+#include "base/rand_util.h" |
#include "base/stringprintf.h" |
+#include "base/string_number_conversions.h" |
#include "testing/gtest/include/gtest/gtest.h" |
+#include <limits> |
+ |
namespace base { |
class FieldTrialTest : public testing::Test { |
public: |
- FieldTrialTest() : trial_list_() { |
+ FieldTrialTest() : trial_list_("client_id") { |
Time now = Time::NowFromSystemTime(); |
TimeDelta oneYear = TimeDelta::FromDays(365); |
Time::Exploded exploded; |
@@ -27,6 +31,13 @@ class FieldTrialTest : public testing::Test { |
last_year_ = exploded.year; |
} |
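+ |
+  // Lets tests override the client id that trial_list_ was constructed |
+  // with, e.g. to simulate a client with an empty id. |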
+ void SetClientId(const std::string& client_id) { |
+ trial_list_.client_id_ = client_id; |
+ } |
+ |
+ void DoSaveTest(); |
+ void DoRestoreTest(const std::string& restored_client_id); |
+ |
protected: |
int next_year_; |
int last_year_; |
@@ -217,7 +228,8 @@ TEST_F(FieldTrialTest, DisableProbability) { |
EXPECT_EQ(default_group_name, trial->group_name()); |
} |
-TEST_F(FieldTrialTest, Save) { |
+void FieldTrialTest::DoSaveTest() { |
+ const std::string& client_id = FieldTrialList::client_id(); |
std::string save_string; |
FieldTrial* trial = |
@@ -226,13 +238,13 @@ TEST_F(FieldTrialTest, Save) { |
// There is no winner yet, so no textual group name is associated with trial. |
EXPECT_EQ("", trial->group_name_internal()); |
FieldTrialList::StatesToString(&save_string); |
- EXPECT_EQ("Some name/Default some name/", save_string); |
+ EXPECT_EQ(client_id + "/Some name/Default some name/", save_string); |
save_string.clear(); |
// Create a winning group. |
trial->AppendGroup("Winner", 10); |
FieldTrialList::StatesToString(&save_string); |
- EXPECT_EQ("Some name/Winner/", save_string); |
+ EXPECT_EQ(client_id + "/Some name/Winner/", save_string); |
save_string.clear(); |
// Create a second trial and winning group. |
@@ -242,14 +254,25 @@ TEST_F(FieldTrialTest, Save) { |
FieldTrialList::StatesToString(&save_string); |
// We assume names are alphabetized... though this is not critical. |
- EXPECT_EQ("Some name/Winner/xxx/yyyy/", save_string); |
+ EXPECT_EQ(client_id + "/Some name/Winner/xxx/yyyy/", save_string); |
} |
-TEST_F(FieldTrialTest, Restore) { |
+TEST_F(FieldTrialTest, Save) { |
+ DoSaveTest(); |
+} |
+ |
+TEST_F(FieldTrialTest, SaveWithEmptyClientId) { |
+ SetClientId(""); |
+ DoSaveTest(); |
+} |
+ |
+void FieldTrialTest::DoRestoreTest(const std::string& restored_client_id) { |
EXPECT_TRUE(FieldTrialList::Find("Some_name") == NULL); |
EXPECT_TRUE(FieldTrialList::Find("xxx") == NULL); |
- FieldTrialList::CreateTrialsInChildProcess("Some_name/Winner/xxx/yyyy/"); |
+ FieldTrialList::CreateTrialsInChildProcess( |
+ restored_client_id + "/Some_name/Winner/xxx/yyyy/"); |
+ EXPECT_EQ(restored_client_id, FieldTrialList::client_id()); |
FieldTrial* trial = FieldTrialList::Find("Some_name"); |
ASSERT_NE(static_cast<FieldTrial*>(NULL), trial); |
@@ -262,13 +285,23 @@ TEST_F(FieldTrialTest, Restore) { |
EXPECT_EQ("xxx", trial->name()); |
} |
+TEST_F(FieldTrialTest, Restore) { |
+ DoRestoreTest("restored_client_id"); |
+} |
+ |
+TEST_F(FieldTrialTest, RestoreWithEmptyClientId) { |
+ DoRestoreTest(""); |
+} |
+ |
TEST_F(FieldTrialTest, BogusRestore) { |
- EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess("MissingSlash")); |
- EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess("MissingGroupName/")); |
- EXPECT_FALSE( |
- FieldTrialList::CreateTrialsInChildProcess("MissingFinalSlash/gname")); |
- EXPECT_FALSE( |
- FieldTrialList::CreateTrialsInChildProcess("/noname, only group/")); |
+ EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess( |
+ "restored_client_id/MissingSlash")); |
+ EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess( |
+ "restored_client_id/MissingGroupName/")); |
+ EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess( |
+ "restored_client_id/MissingFinalSlash/gname")); |
+ EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess( |
+ "restored_client_id/noname, only group/")); |
} |
TEST_F(FieldTrialTest, DuplicateRestore) { |
@@ -278,13 +311,14 @@ TEST_F(FieldTrialTest, DuplicateRestore) { |
trial->AppendGroup("Winner", 10); |
std::string save_string; |
FieldTrialList::StatesToString(&save_string); |
- EXPECT_EQ("Some name/Winner/", save_string); |
+ EXPECT_EQ("client_id/Some name/Winner/", save_string); |
// It is OK if we redundantly specify a winner. |
EXPECT_TRUE(FieldTrialList::CreateTrialsInChildProcess(save_string)); |
// But it is an error to try to change to a different winner. |
- EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess("Some name/Loser/")); |
+ EXPECT_FALSE(FieldTrialList::CreateTrialsInChildProcess( |
+ "client_id/Some name/Loser/")); |
} |
TEST_F(FieldTrialTest, CreateFieldTrial) { |
@@ -321,4 +355,109 @@ TEST_F(FieldTrialTest, MakeName) { |
FieldTrial::MakeName("Histogram", "Field Trial")); |
} |
+TEST_F(FieldTrialTest, HashClientId) { |
+ double results[] = { |
+ FieldTrial::HashClientId("hi", "1"), |
+ FieldTrial::HashClientId("there", "1"), |
+ }; |
+ ASSERT_NE(results[0], results[1]); |
+ for (size_t i = 0; i < arraysize(results); ++i) { |
+ ASSERT_LE(0.0, results[i]); |
+ ASSERT_GT(1.0, results[i]); |
+ } |
+ |
+ ASSERT_EQ(FieldTrial::HashClientId("yo", "1"), |
+ FieldTrial::HashClientId("yo", "1")); |
+ ASSERT_NE(FieldTrial::HashClientId("yo", "something"), |
+ FieldTrial::HashClientId("yo", "else")); |
+} |
+ |
+TEST_F(FieldTrialTest, HashClientIdIsUniform) { |
+  // Choose a random starting number but go sequentially from there, so that |
+  // each run of the test tries a different range while the inputs themselves |
+  // are never uniformly distributed. |
+ int current_number = RandInt(0, std::numeric_limits<int>::max()); |
+ |
+  // The expected value of a random variable is the limit of the sample |
+  // average as the number of samples approaches infinity. For a uniform |
+  // distribution over [0.0, 1.0) that expected value is 0.5. |
+  // |
+  // We take kSamplesBetweenChecks samples at a time and check whether the |
+  // average has converged to a narrow interval around 0.5. A non-uniform |
+  // distribution would likely converge to something different, or fail to |
+  // converge consistently within this range (i.e. the test would start |
+  // timing out occasionally). |
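+  // |
+  // As a rough sanity check on that window: a uniform [0.0, 1.0) variable |
+  // has variance 1/12, so at 1000 samples the standard error of the average |
+  // is about sqrt((1 / 12) / 1000) ~= 0.009, making the +/- 0.02 interval |
+  // around 0.5 roughly a two-sigma bound. |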
+  const int kSamplesBetweenChecks = 300; |
+ int num_samples = 0; |
+ double total_value = 0.0; |
+ while (true) { |
+ for (int i = 0; i < kSamplesBetweenChecks; ++i) { |
+ total_value += FieldTrial::HashClientId( |
+ IntToString(current_number++), "salt"); |
+ num_samples++; |
+ } |
+ |
+ double average = total_value / num_samples; |
+    const double kExpectedMin = 0.48; |
+    const double kExpectedMax = 0.52; |
+ |
+    if (average < kExpectedMin || average > kExpectedMax) { |
+      if (num_samples > 1000) { |
+        // Only printed once we have enough samples that it's very unlikely |
+        // things haven't converged. |
+        printf("After %d samples, the average was %f, outside the expected\n" |
+               "range (%f, %f). We will add more samples and check after every\n" |
+               "%d samples. If the average does not converge, something\n" |
+               "is broken. If it does converge, the test will pass.\n", |
+               num_samples, average, |
+               kExpectedMin, kExpectedMax, kSamplesBetweenChecks); |
+      } |
+    } else { |
+      // Success. |
+      break; |
+    } |
+ } |
+} |
+ |
+TEST_F(FieldTrialTest, UseOneTimeRandomization) { |
+ // Simply asserts that two trials using one-time randomization |
+  // that have different names normally generate different results. |
+ // |
+ // Note that depending on the one-time random initialization, they |
+ // _might_ actually give the same result, but we know that given |
+ // the particular client_id we use for unit tests they won't. |
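+  // (Presumably each trial's name feeds into the hash as the salt, so |
+  // different trial names act like independent draws for the same client |
+  // id.) |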
+ scoped_refptr<FieldTrial> trials[] = { |
+ new FieldTrial("one", 100, "default", next_year_, 1, 1), |
+ new FieldTrial("two", 100, "default", next_year_, 1, 1), |
+ }; |
+ |
+ for (size_t i = 0; i < arraysize(trials); ++i) { |
+ trials[i]->UseOneTimeRandomization(); |
+ |
+ for (int j = 0; j < 100; ++j) { |
+ trials[i]->AppendGroup("", 1); |
+ } |
+ } |
+ |
+ // The trials are most likely to give different results since they have |
+ // different names. |
+ ASSERT_NE(trials[0]->group(), trials[1]->group()); |
+ ASSERT_NE(trials[0]->group_name(), trials[1]->group_name()); |
+} |
+ |
+TEST_F(FieldTrialTest, DisableImmediately) { |
+ FieldTrial* trial = |
+ new FieldTrial("trial", 100, "default", next_year_, 12, 31); |
+ trial->Disable(); |
+ ASSERT_EQ("default", trial->group_name()); |
+ ASSERT_EQ(FieldTrial::kDefaultGroupNumber, trial->group()); |
+} |
+ |
+TEST_F(FieldTrialTest, DisableAfterInitialization) { |
+ FieldTrial* trial = |
+ new FieldTrial("trial", 100, "default", next_year_, 12, 31); |
+ trial->AppendGroup("non_default", 100); |
+ ASSERT_EQ("non_default", trial->group_name()); |
+ trial->Disable(); |
+ ASSERT_EQ("default", trial->group_name()); |
+} |
+ |
} // namespace base |