Index: base/metrics/histogram_samples.cc
diff --git a/base/metrics/histogram_samples.cc b/base/metrics/histogram_samples.cc
index f5e03b979e110cbf297e27ab278225965141a1eb..b6303a53b469cbdbb9602fa3009cefc230e121c5 100644
--- a/base/metrics/histogram_samples.cc
+++ b/base/metrics/histogram_samples.cc
@@ -59,30 +59,50 @@ void SampleCountPickleIterator::Get(HistogramBase::Sample* min,
 }  // namespace
-HistogramSamples::HistogramSamples() : sum_(0), redundant_count_(0) {}
+HistogramSamples::HistogramSamples(uint64_t id)
+    : HistogramSamples(id, &local_meta_) {}
+
+HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta)
+    : meta_(meta) {
+  DCHECK(meta_->id == 0 || meta_->id == id);
+  meta_->id = id;
+}
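The two constructors above let the sample metadata live either inside the object itself (the single-argument form delegates with &local_meta_, presumably a Metadata member of the class) or in caller-supplied storage such as a shared-memory block. The Metadata struct is declared in histogram_samples.h and is not part of this diff; the following is only a rough sketch of what it presumably contains, inferred from the fields this patch touches (meta_->id, meta_->sum, meta_->redundant_count) and from the subtle::NoBarrier_* calls below, which imply that redundant_count is a subtle::Atomic32.

// Sketch only -- not the actual declaration from histogram_samples.h.
struct Metadata {
  uint64_t id;        // Identifier of the owning histogram; the DCHECK above
                      // requires it to be zero (unclaimed storage) or to
                      // already match the histogram being attached.
  int64_t sum;        // Running, value-weighted sum of all samples.
  subtle::Atomic32 redundant_count;  // Total sample count, kept redundantly
                                     // so readers can sanity-check the buckets.
};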
 HistogramSamples::~HistogramSamples() {}
+// Despite using atomic operations, the increment/add actions below are *not*
+// atomic! Race conditions may cause loss of samples or even completely corrupt
+// the 64-bit sum on 32-bit machines. This is done intentionally to reduce the
+// cost of these operations that could be executed in performance-significant
+// points of the code.
+//
+// TODO(bcwhite): Gather quantitative information as to the cost of using
+// proper atomic increments and improve either globally or for those histograms
+// that really need it.
+
 void HistogramSamples::Add(const HistogramSamples& other) {
-  sum_ += other.sum();
+  meta_->sum += other.sum();
+
   HistogramBase::Count old_redundant_count =
-      subtle::NoBarrier_Load(&redundant_count_);
-  subtle::NoBarrier_Store(&redundant_count_,
+      subtle::NoBarrier_Load(&meta_->redundant_count);
+  subtle::NoBarrier_Store(&meta_->redundant_count,
                           old_redundant_count + other.redundant_count());
   bool success = AddSubtractImpl(other.Iterator().get(), ADD);
   DCHECK(success);
 }
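As the comment above stresses, the load/store pair in Add() is two independent atomic accesses rather than one atomic read-modify-write, so two threads updating the same metadata concurrently can lose an increment. A minimal sketch of the difference, using base/atomicops; NoBarrier_AtomicIncrement is one possible form of the "proper atomic increments" the TODO mentions.

#include "base/atomicops.h"

// Illustration only: the two-step pattern used above versus a true atomic
// increment. If two threads both load the same old value, each stores
// old + 1 and one of the increments is lost.
void TwoStepIncrement(base::subtle::Atomic32* count) {
  base::subtle::Atomic32 old = base::subtle::NoBarrier_Load(count);
  base::subtle::NoBarrier_Store(count, old + 1);  // Racy read-modify-write.
}

void AtomicIncrement(base::subtle::Atomic32* count) {
  // Single read-modify-write; safe against concurrent increments, but with
  // the extra cost the TODO wants to quantify.
  base::subtle::NoBarrier_AtomicIncrement(count, 1);
}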
 bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
-  int64 sum;
+  int64_t sum;
   HistogramBase::Count redundant_count;
   if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count))
     return false;
-  sum_ += sum;
+
+  meta_->sum += sum;
+
   HistogramBase::Count old_redundant_count =
-      subtle::NoBarrier_Load(&redundant_count_);
-  subtle::NoBarrier_Store(&redundant_count_,
+      subtle::NoBarrier_Load(&meta_->redundant_count);
+  subtle::NoBarrier_Store(&meta_->redundant_count,
                           old_redundant_count + redundant_count);
   SampleCountPickleIterator pickle_iter(iter);
@@ -90,18 +110,20 @@ bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
 }
 void HistogramSamples::Subtract(const HistogramSamples& other) {
-  sum_ -= other.sum();
+  meta_->sum -= other.sum();
+
   HistogramBase::Count old_redundant_count =
-      subtle::NoBarrier_Load(&redundant_count_);
-  subtle::NoBarrier_Store(&redundant_count_,
+      subtle::NoBarrier_Load(&meta_->redundant_count);
+  subtle::NoBarrier_Store(&meta_->redundant_count,
                           old_redundant_count - other.redundant_count());
   bool success = AddSubtractImpl(other.Iterator().get(), SUBTRACT);
   DCHECK(success);
 }
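Subtract() mirrors Add() with the SUBTRACT operation. A typical caller-side use, sketched here only as an assumption (the snapshot/delta pattern is not part of this diff), is isolating the samples recorded since an earlier snapshot:

// Hypothetical helper: compute the samples recorded since |last_snapshot| was
// taken, leaving the result in |delta|.
void ReportDeltaSince(HistogramBase* histogram,
                      const HistogramSamples& last_snapshot) {
  scoped_ptr<HistogramSamples> delta = histogram->SnapshotSamples();
  delta->Subtract(last_snapshot);  // |delta| now holds only the new samples.
  // ... report |delta| ...
}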
 bool HistogramSamples::Serialize(Pickle* pickle) const {
-  if (!pickle->WriteInt64(sum_) ||
-      !pickle->WriteInt(subtle::NoBarrier_Load(&redundant_count_)))
+  if (!pickle->WriteInt64(meta_->sum))
+    return false;
+  if (!pickle->WriteInt(subtle::NoBarrier_Load(&meta_->redundant_count)))
     return false;
   HistogramBase::Sample min;
@@ -119,13 +141,13 @@ bool HistogramSamples::Serialize(Pickle* pickle) const {
   return true;
 }
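Serialize() now writes the sum and the redundant count as two individually checked pickle fields before the per-bucket data; AddFromPickle() above consumes the same layout. A rough round-trip sketch, where |samples| and |other| stand in for any two concrete HistogramSamples subclasses:

#include "base/pickle.h"

// Hypothetical round trip: fold |samples| into |other| via a Pickle.
bool CopyViaPickle(const HistogramSamples& samples, HistogramSamples* other) {
  Pickle pickle;
  if (!samples.Serialize(&pickle))
    return false;
  PickleIterator iter(pickle);
  return other->AddFromPickle(&iter);  // Reads sum, redundant_count, then the
                                       // (min, max, count) bucket runs.
}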
-void HistogramSamples::IncreaseSum(int64 diff) {
-  sum_ += diff;
+void HistogramSamples::IncreaseSum(int64_t diff) {
+  meta_->sum += diff;
 }
 void HistogramSamples::IncreaseRedundantCount(HistogramBase::Count diff) {
-  subtle::NoBarrier_Store(&redundant_count_,
-                          subtle::NoBarrier_Load(&redundant_count_) + diff);
+  subtle::NoBarrier_Store(&meta_->redundant_count,
+                          subtle::NoBarrier_Load(&meta_->redundant_count) + diff);
 }
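IncreaseSum() and IncreaseRedundantCount() are the protected helpers through which subclasses keep the shared totals in step with their buckets. How a subclass calls them is not shown in this diff; a plausible shape, with SomeSampleStore as a purely hypothetical subclass, is:

// Hypothetical subclass method (sketch only): record |count| events of |value|
// and keep the metadata totals consistent with the bucket update.
void SomeSampleStore::Accumulate(HistogramBase::Sample value,
                                 HistogramBase::Count count) {
  // ... add |count| to the bucket covering |value| ...
  IncreaseSum(static_cast<int64_t>(count) * value);  // Sum is value-weighted.
  IncreaseRedundantCount(count);  // Redundant total for consistency checks.
}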
 SampleCountIterator::~SampleCountIterator() {}