Index: metrics_library_test.cc
diff --git a/metrics_library_test.cc b/metrics_library_test.cc
index 596abd3e651116e0c0169c1ad418d3e2f011ea33..cbf4cce24280d8e7a2560d4992deaf13bacdb441 100644
--- a/metrics_library_test.cc
+++ b/metrics_library_test.cc
@@ -12,6 +12,15 @@
 
 static const FilePath kTestUMAEventsFile("test-uma-events");
 
+static const char kTestConsent[] = "test-consent";
+
+static void SetMetricsEnabled(bool enabled) {
+  if (enabled)
+    ASSERT_EQ(1, file_util::WriteFile(FilePath(kTestConsent), "0", 1));
+  else
+    file_util::Delete(FilePath(kTestConsent), false);
+}
+
 class MetricsLibraryTest : public testing::Test {
  protected:
   virtual void SetUp() {
@@ -19,15 +28,63 @@ class MetricsLibraryTest : public testing::Test {
     lib_.Init();
     EXPECT_TRUE(NULL != lib_.uma_events_file_);
     lib_.uma_events_file_ = kTestUMAEventsFile.value().c_str();
+    SetMetricsEnabled(true);
+    // Defeat metrics enabled caching between tests.
+    lib_.cached_enabled_time_ = 0;
+    lib_.consent_file_ = kTestConsent;
   }
 
   virtual void TearDown() {
     file_util::Delete(kTestUMAEventsFile, false);
   }
 
+  void VerifyEnabledCacheHit(bool to_value);
+  void VerifyEnabledCacheEviction(bool to_value);
+
   MetricsLibrary lib_;
 };
 
+TEST_F(MetricsLibraryTest, AreMetricsEnabledFalse) {
+  SetMetricsEnabled(false);
+  EXPECT_FALSE(lib_.AreMetricsEnabled());
+}
+
+TEST_F(MetricsLibraryTest, AreMetricsEnabledTrue) {
+  EXPECT_TRUE(lib_.AreMetricsEnabled());
+}
+
+void MetricsLibraryTest::VerifyEnabledCacheHit(bool to_value) {
+  // We might step from one second to the next one time, but not 100
+  // times in a row.
+  for (int i = 0; i < 100; ++i) {
+    lib_.cached_enabled_time_ = 0;
+    SetMetricsEnabled(!to_value);
+    ASSERT_EQ(!to_value, lib_.AreMetricsEnabled());
+    SetMetricsEnabled(to_value);
+    if (lib_.AreMetricsEnabled() == !to_value)
+      return;
+  }
+  ADD_FAILURE() << "Did not see evidence of caching";
+}
+
+void MetricsLibraryTest::VerifyEnabledCacheEviction(bool to_value) {
+  lib_.cached_enabled_time_ = 0;
+  SetMetricsEnabled(!to_value);
+  ASSERT_EQ(!to_value, lib_.AreMetricsEnabled());
+  SetMetricsEnabled(to_value);
+  ASSERT_LT(abs(time(NULL) - lib_.cached_enabled_time_), 5);
+  // Sleep one second (or cheat to be faster).
+  --lib_.cached_enabled_time_;
+  ASSERT_EQ(to_value, lib_.AreMetricsEnabled());
+}
+
+TEST_F(MetricsLibraryTest, AreMetricsEnabledCaching) {
+  VerifyEnabledCacheHit(false);
+  VerifyEnabledCacheHit(true);
+  VerifyEnabledCacheEviction(false);
+  VerifyEnabledCacheEviction(true);
+}
+
 TEST_F(MetricsLibraryTest, FormatChromeMessage) {
   char buf[7];
   const int kLen = 6;
@@ -55,6 +112,12 @@ TEST_F(MetricsLibraryTest, SendEnumToUMA) {
   EXPECT_EQ(0, memcmp(exp, buf, kLen));
 }
 
+TEST_F(MetricsLibraryTest, SendEnumToUMANotEnabled) {
+  SetMetricsEnabled(false);
+  EXPECT_TRUE(lib_.SendEnumToUMA("Test.EnumMetric", 1, 3));
+  EXPECT_FALSE(file_util::PathExists(kTestUMAEventsFile));
+}
+
 TEST_F(MetricsLibraryTest, SendMessageToChrome) {
   EXPECT_TRUE(lib_.SendMessageToChrome(4, "test"));
   EXPECT_TRUE(lib_.SendMessageToChrome(7, "content"));
@@ -84,6 +147,12 @@ TEST_F(MetricsLibraryTest, SendToUMA) {
   EXPECT_EQ(0, memcmp(exp, buf, kLen));
 }
 
+TEST_F(MetricsLibraryTest, SendToUMANotEnabled) {
+  SetMetricsEnabled(false);
+  EXPECT_TRUE(lib_.SendToUMA("Test.Metric", 2, 1, 100, 50));
+  EXPECT_FALSE(file_util::PathExists(kTestUMAEventsFile));
+}
+
 class CMetricsLibraryTest : public testing::Test {
  protected:
   virtual void SetUp() {
@@ -93,6 +162,9 @@ class CMetricsLibraryTest : public testing::Test {
     CMetricsLibraryInit(lib_);
     EXPECT_TRUE(NULL != ml.uma_events_file_);
     ml.uma_events_file_ = kTestUMAEventsFile.value().c_str();
+    SetMetricsEnabled(true);
+    reinterpret_cast<MetricsLibrary*>(lib_)->cached_enabled_time_ = 0;
+    reinterpret_cast<MetricsLibrary*>(lib_)->consent_file_ = kTestConsent;
   }
 
   virtual void TearDown() {
@@ -103,6 +175,15 @@ class CMetricsLibraryTest : public testing::Test {
   CMetricsLibrary lib_;
 };
 
+TEST_F(CMetricsLibraryTest, AreMetricsEnabledFalse) {
+  SetMetricsEnabled(false);
+  EXPECT_FALSE(CMetricsLibraryAreMetricsEnabled(lib_));
+}
+
+TEST_F(CMetricsLibraryTest, AreMetricsEnabledTrue) {
+  EXPECT_TRUE(CMetricsLibraryAreMetricsEnabled(lib_));
+}
+
 TEST_F(CMetricsLibraryTest, SendEnumToUMA) {
   char buf[100];
   const int kLen = 40;