| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/metrics/histogram_samples.h" | 5 #include "base/metrics/histogram_samples.h" |
| 6 | 6 |
| 7 #include <limits> | |
| 8 | |
| 9 #include "base/compiler_specific.h" | 7 #include "base/compiler_specific.h" |
| 10 #include "base/numerics/safe_math.h" | |
| 11 #include "base/pickle.h" | 8 #include "base/pickle.h" |
| 12 | 9 |
| 13 namespace base { | 10 namespace base { |
| 14 | 11 |
| 15 namespace { | 12 namespace { |
| 16 | 13 |
| 17 // A shorthand constant for the max value of size_t. | |
| 18 constexpr size_t kSizeMax = std::numeric_limits<size_t>::max(); | |
| 19 | |
| 20 // A constant stored in an AtomicSingleSample (as_atomic) to indicate that the | |
| 21 // sample is "disabled" and no further accumulation should be done with it. The | |
| 22 // value is chosen such that it will be MAX_UINT16 for both |bucket| & |count|, | |
| 23 // and thus less likely to conflict with real use. Conflicts are explicitly | |
| 24 // handled in the code but it's worth making them as unlikely as possible. | |
| 25 constexpr int32_t kDisabledSingleSample = -1; | |
| 26 | |
| 27 class SampleCountPickleIterator : public SampleCountIterator { | 14 class SampleCountPickleIterator : public SampleCountIterator { |
| 28 public: | 15 public: |
| 29 explicit SampleCountPickleIterator(PickleIterator* iter); | 16 explicit SampleCountPickleIterator(PickleIterator* iter); |
| 30 | 17 |
| 31 bool Done() const override; | 18 bool Done() const override; |
| 32 void Next() override; | 19 void Next() override; |
| 33 void Get(HistogramBase::Sample* min, | 20 void Get(HistogramBase::Sample* min, |
| 34 HistogramBase::Sample* max, | 21 HistogramBase::Sample* max, |
| 35 HistogramBase::Count* count) const override; | 22 HistogramBase::Count* count) const override; |
| 36 | 23 |
| (...skipping 28 matching lines...) Expand all Loading... |
| 65 HistogramBase::Sample* max, | 52 HistogramBase::Sample* max, |
| 66 HistogramBase::Count* count) const { | 53 HistogramBase::Count* count) const { |
| 67 DCHECK(!Done()); | 54 DCHECK(!Done()); |
| 68 *min = min_; | 55 *min = min_; |
| 69 *max = max_; | 56 *max = max_; |
| 70 *count = count_; | 57 *count = count_; |
| 71 } | 58 } |
| 72 | 59 |
| 73 } // namespace | 60 } // namespace |
| 74 | 61 |
| 75 static_assert(sizeof(HistogramSamples::AtomicSingleSample) == | |
| 76 sizeof(subtle::Atomic32), | |
| 77 "AtomicSingleSample isn't 32 bits"); | |
| 78 | |
| 79 HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Load() | |
| 80 const { | |
| 81 AtomicSingleSample single_sample = subtle::Acquire_Load(&as_atomic); | |
| 82 | |
| 83 // If the sample was extracted/disabled, it's still zero to the outside. | |
| 84 if (single_sample.as_atomic == kDisabledSingleSample) | |
| 85 single_sample.as_atomic = 0; | |
| 86 | |
| 87 return single_sample.as_parts; | |
| 88 } | |
| 89 | |
| 90 HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Extract( | |
| 91 bool disable) { | |
| 92 AtomicSingleSample single_sample = subtle::NoBarrier_AtomicExchange( | |
| 93 &as_atomic, disable ? kDisabledSingleSample : 0); | |
| 94 if (single_sample.as_atomic == kDisabledSingleSample) | |
| 95 single_sample.as_atomic = 0; | |
| 96 return single_sample.as_parts; | |
| 97 } | |
| 98 | |
| 99 bool HistogramSamples::AtomicSingleSample::Accumulate( | |
| 100 size_t bucket, | |
| 101 HistogramBase::Count count) { | |
| 102 if (count == 0) | |
| 103 return true; | |
| 104 | |
| 105 // Convert the parameters to 16-bit variables because it's all 16-bit below. | |
| 106 if (count < std::numeric_limits<uint16_t>::min() || | |
| 107 count > std::numeric_limits<uint16_t>::max() || | |
| 108 bucket > std::numeric_limits<uint16_t>::max()) { | |
| 109 return false; | |
| 110 } | |
| 111 uint16_t bucket16 = static_cast<uint16_t>(bucket); | |
| 112 uint16_t count16 = static_cast<uint16_t>(count); | |
| 113 | |
| 114 // A local, unshared copy of the single-sample is necessary so the parts | |
| 115 // can be manipulated without worrying about atomicity. | |
| 116 AtomicSingleSample single_sample; | |
| 117 | |
| 118 bool sample_updated; | |
| 119 do { | |
| 120 subtle::Atomic32 original = subtle::Acquire_Load(&as_atomic); | |
| 121 if (original == kDisabledSingleSample) | |
| 122 return false; | |
| 123 single_sample.as_atomic = original; | |
| 124 if (single_sample.as_atomic != 0) { | |
| 125 // Only the same bucket (parameter and stored) can be counted multiple | |
| 126 // times. | |
| 127 if (single_sample.as_parts.bucket != bucket16) | |
| 128 return false; | |
| 129 } else { | |
| 130 // The |single_sample| was zero so becomes the |bucket| parameter, the | |
| 131 // contents of which were checked above to fit in 16 bits. | |
| 132 single_sample.as_parts.bucket = bucket16; | |
| 133 } | |
| 134 | |
| 135 // Update count, making sure that it doesn't overflow. | |
| 136 CheckedNumeric<uint16_t> new_count(single_sample.as_parts.count); | |
| 137 new_count += count16; | |
| 138 if (!new_count.AssignIfValid(&single_sample.as_parts.count)) | |
| 139 return false; | |
| 140 | |
| 141 // Don't let this become equivalent to the "disabled" value. | |
| 142 if (single_sample.as_atomic == kDisabledSingleSample) | |
| 143 return false; | |
| 144 | |
| 145 // Store the updated single-sample back into memory. |existing| is what | |
| 146 // was in that memory location at the time of the call; if it doesn't | |
| 147 // match |original| then the swap didn't happen so loop again. | |
| 148 subtle::Atomic32 existing = subtle::Release_CompareAndSwap( | |
| 149 &as_atomic, original, single_sample.as_atomic); | |
| 150 sample_updated = (existing == original); | |
| 151 } while (!sample_updated); | |
| 152 | |
| 153 return true; | |
| 154 } | |
| 155 | |
| 156 bool HistogramSamples::AtomicSingleSample::IsDisabled() const { | |
| 157 return subtle::Acquire_Load(&as_atomic) == kDisabledSingleSample; | |
| 158 } | |
| 159 | |
| 160 HistogramSamples::LocalMetadata::LocalMetadata() { | |
| 161 // This is the same way it's done for persistent metadata since no ctor | |
| 162 // is called for the data members in that case. | |
| 163 memset(this, 0, sizeof(*this)); | |
| 164 } | |
| 165 | |
| 166 // Don't try to delegate behavior to the constructor below that accepts a | 62 // Don't try to delegate behavior to the constructor below that accepts a |
| 167 // Metadata pointer by passing &local_meta_. Such cannot be reliably passed | 63 // Metadata pointer by passing &local_meta_. Such cannot be reliably passed |
| 168 // because it has not yet been constructed -- no member variables have; the | 64 // because it has not yet been constructed -- no member variables have; the |
| 169 // class itself is in the middle of being constructed. Using it to | 65 // class itself is in the middle of being constructed. Using it to |
| 170 // initialize meta_ is okay because the object now exists and local_meta_ | 66 // initialize meta_ is okay because the object now exists and local_meta_ |
| 171 // is before meta_ in the construction order. | 67 // is before meta_ in the construction order. |
| 172 HistogramSamples::HistogramSamples(uint64_t id) | 68 HistogramSamples::HistogramSamples(uint64_t id) |
| 173 : meta_(&local_meta_) { | 69 : meta_(&local_meta_) { |
| 174 meta_->id = id; | 70 meta_->id = id; |
| 175 } | 71 } |
| 176 | 72 |
| 177 HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta) | 73 HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta) |
| 178 : meta_(meta) { | 74 : meta_(meta) { |
| 179 DCHECK(meta_->id == 0 || meta_->id == id); | 75 DCHECK(meta_->id == 0 || meta_->id == id); |
| 180 | 76 |
| 181 // It's possible that |meta| is contained in initialized, read-only memory | 77 // It's possible that |meta| is contained in initialized, read-only memory |
| 182 // so it's essential that no write be done in that case. | 78 // so it's essential that no write be done in that case. |
| 183 if (!meta_->id) | 79 if (!meta_->id) |
| 184 meta_->id = id; | 80 meta_->id = id; |
| 185 } | 81 } |
| 186 | 82 |
| 187 HistogramSamples::~HistogramSamples() {} | 83 HistogramSamples::~HistogramSamples() {} |
| 188 | 84 |
| 189 void HistogramSamples::Add(const HistogramSamples& other) { | 85 void HistogramSamples::Add(const HistogramSamples& other) { |
| 190 IncreaseSumAndCount(other.sum(), other.redundant_count()); | 86 IncreaseSum(other.sum()); |
| 87 subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, |
| 88 other.redundant_count()); |
| 191 bool success = AddSubtractImpl(other.Iterator().get(), ADD); | 89 bool success = AddSubtractImpl(other.Iterator().get(), ADD); |
| 192 DCHECK(success); | 90 DCHECK(success); |
| 193 } | 91 } |
| 194 | 92 |
| 195 bool HistogramSamples::AddFromPickle(PickleIterator* iter) { | 93 bool HistogramSamples::AddFromPickle(PickleIterator* iter) { |
| 196 int64_t sum; | 94 int64_t sum; |
| 197 HistogramBase::Count redundant_count; | 95 HistogramBase::Count redundant_count; |
| 198 | 96 |
| 199 if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count)) | 97 if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count)) |
| 200 return false; | 98 return false; |
| 201 | 99 |
| 202 IncreaseSumAndCount(sum, redundant_count); | 100 IncreaseSum(sum); |
| 101 subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, |
| 102 redundant_count); |
| 203 | 103 |
| 204 SampleCountPickleIterator pickle_iter(iter); | 104 SampleCountPickleIterator pickle_iter(iter); |
| 205 return AddSubtractImpl(&pickle_iter, ADD); | 105 return AddSubtractImpl(&pickle_iter, ADD); |
| 206 } | 106 } |
| 207 | 107 |
| 208 void HistogramSamples::Subtract(const HistogramSamples& other) { | 108 void HistogramSamples::Subtract(const HistogramSamples& other) { |
| 209 IncreaseSumAndCount(-other.sum(), -other.redundant_count()); | 109 IncreaseSum(-other.sum()); |
| 110 subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, |
| 111 -other.redundant_count()); |
| 210 bool success = AddSubtractImpl(other.Iterator().get(), SUBTRACT); | 112 bool success = AddSubtractImpl(other.Iterator().get(), SUBTRACT); |
| 211 DCHECK(success); | 113 DCHECK(success); |
| 212 } | 114 } |
| 213 | 115 |
| 214 bool HistogramSamples::Serialize(Pickle* pickle) const { | 116 bool HistogramSamples::Serialize(Pickle* pickle) const { |
| 215 if (!pickle->WriteInt64(sum())) | 117 if (!pickle->WriteInt64(sum())) |
| 216 return false; | 118 return false; |
| 217 if (!pickle->WriteInt(redundant_count())) | 119 if (!pickle->WriteInt(redundant_count())) |
| 218 return false; | 120 return false; |
| 219 | 121 |
| 220 HistogramBase::Sample min; | 122 HistogramBase::Sample min; |
| 221 HistogramBase::Sample max; | 123 HistogramBase::Sample max; |
| 222 HistogramBase::Count count; | 124 HistogramBase::Count count; |
| 223 for (std::unique_ptr<SampleCountIterator> it = Iterator(); !it->Done(); | 125 for (std::unique_ptr<SampleCountIterator> it = Iterator(); !it->Done(); |
| 224 it->Next()) { | 126 it->Next()) { |
| 225 it->Get(&min, &max, &count); | 127 it->Get(&min, &max, &count); |
| 226 if (!pickle->WriteInt(min) || | 128 if (!pickle->WriteInt(min) || |
| 227 !pickle->WriteInt(max) || | 129 !pickle->WriteInt(max) || |
| 228 !pickle->WriteInt(count)) | 130 !pickle->WriteInt(count)) |
| 229 return false; | 131 return false; |
| 230 } | 132 } |
| 231 return true; | 133 return true; |
| 232 } | 134 } |
| 233 | 135 |
| 234 bool HistogramSamples::AccumulateSingleSample(HistogramBase::Sample value, | 136 void HistogramSamples::IncreaseSum(int64_t diff) { |
| 235 HistogramBase::Count count, | 137 #ifdef ARCH_CPU_64_BITS |
| 236 size_t bucket) { | 138 subtle::NoBarrier_AtomicIncrement(&meta_->sum, diff); |
| 237 if (single_sample().Accumulate(bucket, count)) { | 139 #else |
| 238 // Success. Update the (separate) sum and redundant-count. | 140 meta_->sum += diff; |
| 239 IncreaseSumAndCount(static_cast<int64_t>(value) * count, count); | 141 #endif |
| 240 return true; | |
| 241 } | |
| 242 return false; | |
| 243 } | 142 } |
| 244 | 143 |
| 245 void HistogramSamples::IncreaseSumAndCount(int64_t sum, | 144 void HistogramSamples::IncreaseRedundantCount(HistogramBase::Count diff) { |
| 246 HistogramBase::Count count) { | 145 subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, diff); |
| 247 #ifdef ARCH_CPU_64_BITS | |
| 248 subtle::NoBarrier_AtomicIncrement(&meta_->sum, sum); | |
| 249 #else | |
| 250 meta_->sum += sum; | |
| 251 #endif | |
| 252 subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, count); | |
| 253 } | 146 } |
| 254 | 147 |
| 255 SampleCountIterator::~SampleCountIterator() {} | 148 SampleCountIterator::~SampleCountIterator() {} |
| 256 | 149 |
| 257 bool SampleCountIterator::GetBucketIndex(size_t* index) const { | 150 bool SampleCountIterator::GetBucketIndex(size_t* index) const { |
| 258 DCHECK(!Done()); | 151 DCHECK(!Done()); |
| 259 return false; | 152 return false; |
| 260 } | 153 } |
| 261 | 154 |
| 262 SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min, | |
| 263 HistogramBase::Sample max, | |
| 264 HistogramBase::Count count) | |
| 265 : SingleSampleIterator(min, max, count, kSizeMax) {} | |
| 266 | |
| 267 SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min, | |
| 268 HistogramBase::Sample max, | |
| 269 HistogramBase::Count count, | |
| 270 size_t bucket_index) | |
| 271 : min_(min), max_(max), bucket_index_(bucket_index), count_(count) {} | |
| 272 | |
| 273 SingleSampleIterator::~SingleSampleIterator() {} | |
| 274 | |
| 275 bool SingleSampleIterator::Done() const { | |
| 276 return count_ == 0; | |
| 277 } | |
| 278 | |
| 279 void SingleSampleIterator::Next() { | |
| 280 DCHECK(!Done()); | |
| 281 count_ = 0; | |
| 282 } | |
| 283 | |
| 284 void SingleSampleIterator::Get(HistogramBase::Sample* min, | |
| 285 HistogramBase::Sample* max, | |
| 286 HistogramBase::Count* count) const { | |
| 287 DCHECK(!Done()); | |
| 288 if (min != nullptr) | |
| 289 *min = min_; | |
| 290 if (max != nullptr) | |
| 291 *max = max_; | |
| 292 if (count != nullptr) | |
| 293 *count = count_; | |
| 294 } | |
| 295 | |
| 296 bool SingleSampleIterator::GetBucketIndex(size_t* index) const { | |
| 297 DCHECK(!Done()); | |
| 298 if (bucket_index_ == kSizeMax) | |
| 299 return false; | |
| 300 *index = bucket_index_; | |
| 301 return true; | |
| 302 } | |
| 303 | |
| 304 } // namespace base | 155 } // namespace base |
| OLD | NEW |