Chromium Code Reviews| Index: base/metrics/histogram_samples.cc |
| diff --git a/base/metrics/histogram_samples.cc b/base/metrics/histogram_samples.cc |
| index 3475cd59f71df439ebd7cd124d4614ccd588b98a..a63d2b16760f9fc79b72eb552fd7167e70af7d42 100644 |
| --- a/base/metrics/histogram_samples.cc |
| +++ b/base/metrics/histogram_samples.cc |
| @@ -4,6 +4,8 @@ |
| #include "base/metrics/histogram_samples.h" |
| +#include <limits> |
| + |
| #include "base/compiler_specific.h" |
| #include "base/pickle.h" |
| @@ -11,6 +13,13 @@ namespace base { |
| namespace { |
// A shorthand constant for the max value of size_t.
// (Review, Alexei Svitkine 2017/04/18: "Nit: Can this be constexpr?" — done.)
constexpr size_t kSizeMax = std::numeric_limits<size_t>::max();

// A constant stored in an AtomicSingleSample (as_atomic) to indicate that the
// sample is "disabled" and no further accumulation should be done with it.
// (Review, Alexei Svitkine 2017/04/18: "Nit: constexpr" — done.)
constexpr int32_t kDisabledSingleSample = -1;
| class SampleCountPickleIterator : public SampleCountIterator { |
| public: |
| explicit SampleCountPickleIterator(PickleIterator* iter); |
| @@ -59,6 +68,90 @@ void SampleCountPickleIterator::Get(HistogramBase::Sample* min, |
| } // namespace |
// AtomicSingleSample overlays its 16-bit value/count parts on one 32-bit
// atomic word; the overlay is only sound if the whole thing is exactly the
// size of an Atomic32, so lock this in at compile time.
static_assert(sizeof(HistogramSamples::AtomicSingleSample) ==
                  sizeof(subtle::Atomic32),
              "AtomicSingleSample isn't 32 bits");
| + |
// Returns a copy of the sample's value/count parts without modifying the
// shared atomic word.
HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Load()
    const {
  AtomicSingleSample single_sample = subtle::Acquire_Load(&as_atomic);
  // A "disabled" sample reads as empty (zero). Per the review discussion
  // (bcwhite, 2017/04/19): an extracted sample becomes zero, and a disabled
  // one must look the same to readers so no value is reported from it.
  if (single_sample.as_atomic == kDisabledSingleSample)
    single_sample.as_atomic = 0;
  return single_sample.as_parts;
}
| + |
| +HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Extract( |
| + bool disable) { |
| + AtomicSingleSample single_sample = subtle::NoBarrier_AtomicExchange( |
| + &as_atomic, disable ? kDisabledSingleSample : 0); |
| + if (single_sample.as_atomic == kDisabledSingleSample) |
| + single_sample.as_atomic = 0; |
| + return single_sample.as_parts; |
| +} |
| + |
| +bool HistogramSamples::AtomicSingleSample::Accumulate( |
| + HistogramBase::Sample value, |
| + HistogramBase::Count count) { |
| + if (count == 0) |
| + return true; |
| + |
| + // Convert the parameters to 16-bit variables because it's all 16-bit below. |
| + if (value < std::numeric_limits<int16_t>::min() || |
| + value > std::numeric_limits<int16_t>::max() || |
| + count < std::numeric_limits<int16_t>::min() || |
| + count > std::numeric_limits<int16_t>::max()) { |
| + return false; |
| + } |
| + int16_t value16 = static_cast<int16_t>(value); |
| + int16_t count16 = static_cast<int16_t>(count); |
| + |
| + // A local, unshared copy of the single-sample is necessary so the parts |
| + // can be manipulated without worrying about atomicity. |
| + AtomicSingleSample single_sample; |
| + |
| + subtle::Atomic32 existing, found; |
|
Alexei Svitkine (slow)
2017/04/18 20:52:20
Nit: Separate lines.
bcwhite
2017/04/19 18:13:02
Done.
|
| + do { |
| + existing = subtle::Acquire_Load(&as_atomic); |
| + if (existing == kDisabledSingleSample) |
|
Alexei Svitkine (slow)
2017/04/18 20:52:20
if Load() returned a bool and took a param, then t
bcwhite
2017/04/19 18:13:02
Load() returns the parts structure. This needs th
|
| + return false; |
| + single_sample.as_atomic = existing; |
| + if (single_sample.as_atomic != 0) { |
| + if (single_sample.as_parts.value != value16) |
|
Alexei Svitkine (slow)
2017/04/18 20:52:20
Nit: Add a comment for semantic meaning of this ch
bcwhite
2017/04/19 18:13:02
Done.
|
| + return false; |
| + // Make sure counts don't overflow. |
| + if (count16 < 0) { |
|
Alexei Svitkine (slow)
2017/04/18 20:52:20
Instead of your own checks, can you use base/numer
bcwhite
2017/04/19 18:13:02
Done.
|
| + if (single_sample.as_parts.count < |
| + std::numeric_limits<int16_t>::min() - count16) { |
| + return false; |
| + } |
| + } else { |
| + if (single_sample.as_parts.count > |
| + std::numeric_limits<int16_t>::max() - count16) { |
| + return false; |
| + } |
| + } |
| + } else { |
| + single_sample.as_parts.value = value16; |
|
Alexei Svitkine (slow)
2017/04/18 20:52:20
Nit: Add a comment here mentioning no need to chec
bcwhite
2017/04/19 18:13:02
Done.
|
| + } |
| + |
| + single_sample.as_parts.count += count16; |
| + if (single_sample.as_parts.count == 0) |
| + single_sample.as_atomic = 0; |
| + else if (single_sample.as_atomic == kDisabledSingleSample) |
| + return false; |
| + found = subtle::Release_CompareAndSwap(&as_atomic, existing, |
| + single_sample.as_atomic); |
| + } while (found != existing); |
| + |
| + return true; |
| +} |
| + |
// Zero-initializes all metadata fields.
HistogramSamples::LocalMetadata::LocalMetadata() {
  // This is the same way it's done for persistent metadata since no ctor
  // is called for the data members in that case.
  // NOTE(review): this requires that every member of Metadata stay trivial
  // (no vtable, no non-trivially-copyable members); per the review thread,
  // that requirement is documented on the Metadata struct in the header.
  memset(this, 0, sizeof(*this));
}
| + |
| // Don't try to delegate behavior to the constructor below that accepts a |
| // Metadata pointer by passing &local_meta_. Such cannot be reliably passed |
| // because it has not yet been constructed -- no member variables have; the |
| @@ -83,9 +176,7 @@ HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta) |
| HistogramSamples::~HistogramSamples() {} |
// Merges |other|'s accumulated samples into this set: the (separately
// stored) sum and redundant-count first, then the per-bucket counts via
// AddSubtractImpl.
void HistogramSamples::Add(const HistogramSamples& other) {
  IncreaseSumAndCount(other.sum(), other.redundant_count());
  bool success = AddSubtractImpl(other.Iterator().get(), ADD);
  DCHECK(success);
}
| @@ -97,18 +188,14 @@ bool HistogramSamples::AddFromPickle(PickleIterator* iter) { |
| if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count)) |
| return false; |
| - IncreaseSum(sum); |
| - subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, |
| - redundant_count); |
| + IncreaseSumAndCount(sum, redundant_count); |
| SampleCountPickleIterator pickle_iter(iter); |
| return AddSubtractImpl(&pickle_iter, ADD); |
| } |
| void HistogramSamples::Subtract(const HistogramSamples& other) { |
| - IncreaseSum(-other.sum()); |
| - subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, |
| - -other.redundant_count()); |
| + IncreaseSumAndCount(-other.sum(), -other.redundant_count()); |
| bool success = AddSubtractImpl(other.Iterator().get(), SUBTRACT); |
| DCHECK(success); |
| } |
| @@ -133,16 +220,24 @@ bool HistogramSamples::Serialize(Pickle* pickle) const { |
| return true; |
| } |
| -void HistogramSamples::IncreaseSum(int64_t diff) { |
| +bool HistogramSamples::AccumulateSingleSample(HistogramBase::Sample value, |
| + HistogramBase::Count count) { |
| + if (single_sample().Accumulate(value, count)) { |
| + // Success. Update the (separate) sum and redundant-count. |
| + IncreaseSumAndCount(static_cast<int64_t>(value) * count, count); |
| + return true; |
| + } |
| + return false; |
| +} |
| + |
| +void HistogramSamples::IncreaseSumAndCount(int64_t sum, |
| + HistogramBase::Count count) { |
| #ifdef ARCH_CPU_64_BITS |
| - subtle::NoBarrier_AtomicIncrement(&meta_->sum, diff); |
| + subtle::NoBarrier_AtomicIncrement(&meta_->sum, sum); |
| #else |
| - meta_->sum += diff; |
| + meta_->sum += sum; |
| #endif |
| -} |
| - |
| -void HistogramSamples::IncreaseRedundantCount(HistogramBase::Count diff) { |
| - subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, diff); |
| + subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, count); |
| } |
| SampleCountIterator::~SampleCountIterator() {} |
| @@ -152,4 +247,46 @@ bool SampleCountIterator::GetBucketIndex(size_t* index) const { |
| return false; |
| } |
// Constructs an iterator over one min/max/count sample with no known bucket
// index; GetBucketIndex() will return false (kSizeMax is the sentinel).
SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min,
                                           HistogramBase::Sample max,
                                           HistogramBase::Count count)
    : SingleSampleIterator(min, max, count, kSizeMax) {}

// Constructs an iterator over one min/max/count sample with a known bucket
// index.
SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min,
                                           HistogramBase::Sample max,
                                           HistogramBase::Count count,
                                           size_t bucket_index)
    : min_(min), max_(max), bucket_index_(bucket_index), count_(count) {}

SingleSampleIterator::~SingleSampleIterator() {}
| + |
| +bool SingleSampleIterator::Done() const { |
| + return count_ == 0; |
| +} |
| + |
| +void SingleSampleIterator::Next() { |
| + DCHECK(!Done()); |
| + count_ = 0; |
| +} |
| + |
| +void SingleSampleIterator::Get(HistogramBase::Sample* min, |
| + HistogramBase::Sample* max, |
| + HistogramBase::Count* count) const { |
| + DCHECK(!Done()); |
| + if (min != nullptr) |
| + *min = min_; |
| + if (max != nullptr) |
| + *max = max_; |
| + if (count != nullptr) |
| + *count = count_; |
| +} |
| + |
| +bool SingleSampleIterator::GetBucketIndex(size_t* index) const { |
| + DCHECK(!Done()); |
| + if (bucket_index_ == kSizeMax) |
| + return false; |
| + *index = bucket_index_; |
| + return true; |
| +} |
| + |
| } // namespace base |