Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(3275)

Unified Diff: base/metrics/histogram_samples.cc

Issue 1471073007: Reorganize histograms for persistence. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@shmem-alloc
Patch Set: added GN changes Created 5 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: base/metrics/histogram_samples.cc
diff --git a/base/metrics/histogram_samples.cc b/base/metrics/histogram_samples.cc
index f5e03b979e110cbf297e27ab278225965141a1eb..ec065c602d4df7f780995dc01114032134820ab9 100644
--- a/base/metrics/histogram_samples.cc
+++ b/base/metrics/histogram_samples.cc
@@ -59,15 +59,45 @@ void SampleCountPickleIterator::Get(HistogramBase::Sample* min,
} // namespace
-HistogramSamples::HistogramSamples() : sum_(0), redundant_count_(0) {}
+HistogramSamples::HistogramSamples(uint64 id)
+ : meta_(&local_meta_) {
+ SetMetadataId(id);
+}
+
+HistogramSamples::HistogramSamples(uint64 id, Metadata* meta)
+ : meta_(meta) {
+ SetMetadataId(id);
+}
HistogramSamples::~HistogramSamples() {}
+void HistogramSamples::SetMetadataId(uint64 id) {
+ CHECK(meta_->id == 0 || meta_->id == id);
+ *const_cast<uint64*>(&meta_->id) = id;
+}
+
+// Despite using atomic operations, the increment/add actions below are *not*
+// atomic! Race conditions may cause loss of samples or even completely corrupt
+// the 64-bit sum on 32-bit machines. This is done intentionally to reduce the
+// cost of these operations that could be executed in performance-significant
+// points of the code.
+//
+// TODO(bcwhite): Gather quantitative information as to the cost of using
+// proper atomic increments and improve either globally or for those histograms
+// that really need it.
+
void HistogramSamples::Add(const HistogramSamples& other) {
- sum_ += other.sum();
+#ifdef ARCH_CPU_64_BITS
+ subtle::NoBarrier_Store(
+ &meta_->sum,
+ subtle::NoBarrier_Load(&meta_->sum) + other.sum());
+#else // No Atomic64 on 32-bit platforms.
+ meta_->sum += other.sum();
+#endif
+
HistogramBase::Count old_redundant_count =
- subtle::NoBarrier_Load(&redundant_count_);
- subtle::NoBarrier_Store(&redundant_count_,
+ subtle::NoBarrier_Load(&meta_->redundant_count);
+ subtle::NoBarrier_Store(&meta_->redundant_count,
old_redundant_count + other.redundant_count());
bool success = AddSubtractImpl(other.Iterator().get(), ADD);
DCHECK(success);
@@ -79,10 +109,18 @@ bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count))
return false;
- sum_ += sum;
+
+#ifdef ARCH_CPU_64_BITS
+ subtle::NoBarrier_Store(
+ &meta_->sum,
+ subtle::NoBarrier_Load(&meta_->sum) + sum);
+#else // No Atomic64 on 32-bit platforms.
+ meta_->sum += sum;
+#endif
+
HistogramBase::Count old_redundant_count =
- subtle::NoBarrier_Load(&redundant_count_);
- subtle::NoBarrier_Store(&redundant_count_,
+ subtle::NoBarrier_Load(&meta_->redundant_count);
+ subtle::NoBarrier_Store(&meta_->redundant_count,
old_redundant_count + redundant_count);
SampleCountPickleIterator pickle_iter(iter);
@@ -90,18 +128,31 @@ bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
}
void HistogramSamples::Subtract(const HistogramSamples& other) {
- sum_ -= other.sum();
+#ifdef ARCH_CPU_64_BITS
+ subtle::NoBarrier_Store(
+ &meta_->sum,
+ subtle::NoBarrier_Load(&meta_->sum) - other.sum());
+#else // No Atomic64 on 32-bit platforms.
+ meta_->sum -= other.sum();
+#endif
+
HistogramBase::Count old_redundant_count =
- subtle::NoBarrier_Load(&redundant_count_);
- subtle::NoBarrier_Store(&redundant_count_,
+ subtle::NoBarrier_Load(&meta_->redundant_count);
+ subtle::NoBarrier_Store(&meta_->redundant_count,
old_redundant_count - other.redundant_count());
bool success = AddSubtractImpl(other.Iterator().get(), SUBTRACT);
DCHECK(success);
}
bool HistogramSamples::Serialize(Pickle* pickle) const {
- if (!pickle->WriteInt64(sum_) ||
- !pickle->WriteInt(subtle::NoBarrier_Load(&redundant_count_)))
+#ifdef ARCH_CPU_64_BITS
+ if (!pickle->WriteInt64(subtle::NoBarrier_Load(&meta_->sum)))
+ return false;
+#else // No Atomic64 on 32-bit platforms.
+ if (!pickle->WriteInt64(meta_->sum))
+ return false;
+#endif
+ if (!pickle->WriteInt(subtle::NoBarrier_Load(&meta_->redundant_count)))
return false;
HistogramBase::Sample min;
@@ -120,12 +171,18 @@ bool HistogramSamples::Serialize(Pickle* pickle) const {
}
void HistogramSamples::IncreaseSum(int64 diff) {
- sum_ += diff;
+#ifdef ARCH_CPU_64_BITS
Alexei Svitkine (slow) 2015/11/25 18:05:27 Can we hide these details in the implementation of […comment truncated in page capture]
bcwhite 2015/11/25 21:54:17 I'll look at moving the metadata to std::atomic wh[…comment truncated in page capture]
+ subtle::NoBarrier_Store(
+ &meta_->sum,
+ subtle::NoBarrier_Load(&meta_->sum) + diff);
+#else // No Atomic64 on 32-bit platforms.
+ meta_->sum += diff;
+#endif
}
void HistogramSamples::IncreaseRedundantCount(HistogramBase::Count diff) {
- subtle::NoBarrier_Store(&redundant_count_,
- subtle::NoBarrier_Load(&redundant_count_) + diff);
+ subtle::NoBarrier_Store(&meta_->redundant_count,
+ subtle::NoBarrier_Load(&meta_->redundant_count) + diff);
}
SampleCountIterator::~SampleCountIterator() {}

Powered by Google App Engine
This is Rietveld 408576698