Chromium Code Reviews

Diff: base/metrics/histogram_samples.cc

Issue 2811713003: Embed a single sample in histogram metadata. (Closed)
Patch Set: addressed review comments by asvitkine Created 3 years, 8 months ago
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/metrics/histogram_samples.h"

+#include <limits>
+
#include "base/compiler_specific.h"
#include "base/pickle.h"

namespace base {

namespace {

+// A shorthand constant for the max value of size_t.
+const size_t kSizeMax = std::numeric_limits<size_t>::max();
Alexei Svitkine (slow) 2017/04/18 20:52:20 Nit: Can this be constexpr?
bcwhite 2017/04/19 18:13:02 Done.
+
+// A constant stored in an AtomicSingleSample (as_atomic) to indicate that the
+// sample is "disabled" and no further accumulation should be done with it.
+const int32_t kDisabledSingleSample = -1;
Alexei Svitkine (slow) 2017/04/18 20:52:19 Nit: constexpr
bcwhite 2017/04/19 18:13:02 Done.
+
class SampleCountPickleIterator : public SampleCountIterator {
 public:
  explicit SampleCountPickleIterator(PickleIterator* iter);

  bool Done() const override;
  void Next() override;
  void Get(HistogramBase::Sample* min,
           HistogramBase::Sample* max,
           HistogramBase::Count* count) const override;

(...skipping 28 matching lines...)
                                     HistogramBase::Sample* max,
                                     HistogramBase::Count* count) const {
  DCHECK(!Done());
  *min = min_;
  *max = max_;
  *count = count_;
}

}  // namespace

+static_assert(sizeof(HistogramSamples::AtomicSingleSample) ==
+                  sizeof(subtle::Atomic32),
+              "AtomicSingleSample isn't 32 bits");
+
+HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Load()
+    const {
+  AtomicSingleSample single_sample = subtle::Acquire_Load(&as_atomic);
+  if (single_sample.as_atomic == kDisabledSingleSample)
+    single_sample.as_atomic = 0;
Alexei Svitkine (slow) 2017/04/18 20:52:20 This seems like a strange thing to do. Wouldn't it
bcwhite 2017/04/19 18:13:02 When a value is "extracted", it becomes zero. Whe
+  return single_sample.as_parts;
+}
+
+HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Extract(
+    bool disable) {
+  AtomicSingleSample single_sample = subtle::NoBarrier_AtomicExchange(
+      &as_atomic, disable ? kDisabledSingleSample : 0);
+  if (single_sample.as_atomic == kDisabledSingleSample)
+    single_sample.as_atomic = 0;
+  return single_sample.as_parts;
+}
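
To make the exchange above concrete: Load() masks the disabled sentinel so callers always see either a real sample or zero, and Extract() atomically takes whatever is there, optionally disabling the slot. A minimal sketch, not part of the patch, assuming a hypothetical slot that already holds one sample of value 7:

  // Sketch only: expected behaviour of a hypothetical AtomicSingleSample |slot|.
  HistogramSamples::SingleSample s = slot.Load();  // s.value == 7, s.count == 1
  s = slot.Extract(/*disable=*/true);              // takes {7, 1}; slot is now disabled
  s = slot.Load();                                 // reads back as zero, not as the sentinel
  bool added = slot.Accumulate(7, 1);              // false: a disabled slot accepts nothing more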
+
+bool HistogramSamples::AtomicSingleSample::Accumulate(
+    HistogramBase::Sample value,
+    HistogramBase::Count count) {
+  if (count == 0)
+    return true;
+
+  // Convert the parameters to 16-bit variables because it's all 16-bit below.
+  if (value < std::numeric_limits<int16_t>::min() ||
+      value > std::numeric_limits<int16_t>::max() ||
+      count < std::numeric_limits<int16_t>::min() ||
+      count > std::numeric_limits<int16_t>::max()) {
+    return false;
+  }
+  int16_t value16 = static_cast<int16_t>(value);
+  int16_t count16 = static_cast<int16_t>(count);
+
+  // A local, unshared copy of the single-sample is necessary so the parts
+  // can be manipulated without worrying about atomicity.
+  AtomicSingleSample single_sample;
+
+  subtle::Atomic32 existing, found;
Alexei Svitkine (slow) 2017/04/18 20:52:20 Nit: Separate lines.
bcwhite 2017/04/19 18:13:02 Done.
+  do {
+    existing = subtle::Acquire_Load(&as_atomic);
+    if (existing == kDisabledSingleSample)
Alexei Svitkine (slow) 2017/04/18 20:52:20 if Load() returned a bool and took a param, then t
bcwhite 2017/04/19 18:13:02 Load() returns the parts structure. This needs th
+      return false;
+    single_sample.as_atomic = existing;
+    if (single_sample.as_atomic != 0) {
+      if (single_sample.as_parts.value != value16)
Alexei Svitkine (slow) 2017/04/18 20:52:20 Nit: Add a comment for semantic meaning of this ch
bcwhite 2017/04/19 18:13:02 Done.
+        return false;
+      // Make sure counts don't overflow.
+      if (count16 < 0) {
Alexei Svitkine (slow) 2017/04/18 20:52:20 Instead of your own checks, can you use base/numer
bcwhite 2017/04/19 18:13:02 Done.
+        if (single_sample.as_parts.count <
+            std::numeric_limits<int16_t>::min() - count16) {
+          return false;
+        }
+      } else {
+        if (single_sample.as_parts.count >
+            std::numeric_limits<int16_t>::max() - count16) {
+          return false;
+        }
+      }
+    } else {
+      single_sample.as_parts.value = value16;
Alexei Svitkine (slow) 2017/04/18 20:52:20 Nit: Add a comment here mentioning no need to chec
bcwhite 2017/04/19 18:13:02 Done.
+    }
+
+    single_sample.as_parts.count += count16;
+    if (single_sample.as_parts.count == 0)
+      single_sample.as_atomic = 0;
+    else if (single_sample.as_atomic == kDisabledSingleSample)
+      return false;
+    found = subtle::Release_CompareAndSwap(&as_atomic, existing,
+                                           single_sample.as_atomic);
+  } while (found != existing);
+
+  return true;
+}
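
Regarding the base/numerics suggestion above (answered "Done" for a later patch set): the hand-rolled min/max comparisons can be expressed with base::CheckedNumeric. A sketch of the idea with a hypothetical helper name, not the exact code that landed:

  #include "base/numerics/safe_math.h"

  // Hypothetical helper: adds |count16| to |*current| unless it would overflow int16_t.
  bool AddWithoutOverflow(int16_t* current, int16_t count16) {
    base::CheckedNumeric<int16_t> new_count(*current);
    new_count += count16;
    if (!new_count.IsValid())
      return false;  // Overflow or underflow of the 16-bit count.
    *current = new_count.ValueOrDie();
    return true;
  }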
+
+HistogramSamples::LocalMetadata::LocalMetadata() {
+  // This is the same way it's done for persistent metadata since no ctor
+  // is called for the data members in that case.
Alexei Svitkine (slow) 2017/04/18 20:52:20 This adds a requirement that all members must be v
bcwhite 2017/04/19 18:13:02 Done (in the .h file for "struct Metadata").
+  memset(this, 0, sizeof(*this));
+}
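
The exchange above notes that zero-filling via memset constrains the struct's members, a requirement the author documented in the header for struct Metadata. A hypothetical compile-time guard for that constraint, shown for illustration only:

  #include <type_traits>

  // Hypothetical, not part of the patch: fail the build if Metadata ever gains
  // members that make memset-initialization unsafe.
  static_assert(std::is_trivially_copyable<HistogramSamples::Metadata>::value,
                "Metadata must stay memset-able");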
+
// Don't try to delegate behavior to the constructor below that accepts a
// Metadata pointer by passing &local_meta_. Such cannot be reliably passed
// because it has not yet been constructed -- no member variables have; the
// class itself is in the middle of being constructed. Using it to
// initialize meta_ is okay because the object now exists and local_meta_
// is before meta_ in the construction order.
HistogramSamples::HistogramSamples(uint64_t id)
    : meta_(&local_meta_) {
  meta_->id = id;
}

HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta)
    : meta_(meta) {
  DCHECK(meta_->id == 0 || meta_->id == id);

  // It's possible that |meta| is contained in initialized, read-only memory
  // so it's essential that no write be done in that case.
  if (!meta_->id)
    meta_->id = id;
}

HistogramSamples::~HistogramSamples() {}

void HistogramSamples::Add(const HistogramSamples& other) {
-  IncreaseSum(other.sum());
-  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count,
-                                    other.redundant_count());
+  IncreaseSumAndCount(other.sum(), other.redundant_count());
  bool success = AddSubtractImpl(other.Iterator().get(), ADD);
  DCHECK(success);
}

bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
  int64_t sum;
  HistogramBase::Count redundant_count;

  if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count))
    return false;

-  IncreaseSum(sum);
-  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count,
-                                    redundant_count);
+  IncreaseSumAndCount(sum, redundant_count);

  SampleCountPickleIterator pickle_iter(iter);
  return AddSubtractImpl(&pickle_iter, ADD);
}

void HistogramSamples::Subtract(const HistogramSamples& other) {
-  IncreaseSum(-other.sum());
-  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count,
-                                    -other.redundant_count());
+  IncreaseSumAndCount(-other.sum(), -other.redundant_count());
  bool success = AddSubtractImpl(other.Iterator().get(), SUBTRACT);
  DCHECK(success);
}

bool HistogramSamples::Serialize(Pickle* pickle) const {
  if (!pickle->WriteInt64(sum()))
    return false;
  if (!pickle->WriteInt(redundant_count()))
    return false;

  HistogramBase::Sample min;
  HistogramBase::Sample max;
  HistogramBase::Count count;
  for (std::unique_ptr<SampleCountIterator> it = Iterator(); !it->Done();
       it->Next()) {
    it->Get(&min, &max, &count);
    if (!pickle->WriteInt(min) ||
        !pickle->WriteInt(max) ||
        !pickle->WriteInt(count))
      return false;
  }
  return true;
}

-void HistogramSamples::IncreaseSum(int64_t diff) {
-#ifdef ARCH_CPU_64_BITS
-  subtle::NoBarrier_AtomicIncrement(&meta_->sum, diff);
-#else
-  meta_->sum += diff;
-#endif
+bool HistogramSamples::AccumulateSingleSample(HistogramBase::Sample value,
+                                              HistogramBase::Count count) {
+  if (single_sample().Accumulate(value, count)) {
+    // Success. Update the (separate) sum and redundant-count.
+    IncreaseSumAndCount(static_cast<int64_t>(value) * count, count);
+    return true;
+  }
+  return false;
}

-void HistogramSamples::IncreaseRedundantCount(HistogramBase::Count diff) {
-  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, diff);
+void HistogramSamples::IncreaseSumAndCount(int64_t sum,
+                                           HistogramBase::Count count) {
+#ifdef ARCH_CPU_64_BITS
+  subtle::NoBarrier_AtomicIncrement(&meta_->sum, sum);
+#else
+  meta_->sum += sum;
+#endif
+  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, count);
}
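
The two new methods split the work: AccumulateSingleSample() tries the packed slot and, on success, also updates the shared sum and redundant-count, while IncreaseSumAndCount() is the common bookkeeping now used by Add(), Subtract(), and AddFromPickle(). A sketch of a hypothetical caller (the real callers are in the histogram subclasses, not in this file):

  // Sketch only: try the single-sample fast path, else fall back to full buckets.
  void RecordSample(HistogramSamples* samples,
                    HistogramBase::Sample value,
                    HistogramBase::Count count) {
    if (samples->AccumulateSingleSample(value, count))
      return;  // Stored in the packed slot; sum/redundant_count already updated.
    AddToBuckets(samples, value, count);  // Hypothetical bucket-storage fallback.
  }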

SampleCountIterator::~SampleCountIterator() {}

bool SampleCountIterator::GetBucketIndex(size_t* index) const {
  DCHECK(!Done());
  return false;
}

+SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min,
+                                           HistogramBase::Sample max,
+                                           HistogramBase::Count count)
+    : SingleSampleIterator(min, max, count, kSizeMax) {}
+
+SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min,
+                                           HistogramBase::Sample max,
+                                           HistogramBase::Count count,
+                                           size_t bucket_index)
+    : min_(min), max_(max), bucket_index_(bucket_index), count_(count) {}
+
+SingleSampleIterator::~SingleSampleIterator() {}
+
+bool SingleSampleIterator::Done() const {
+  return count_ == 0;
+}
+
+void SingleSampleIterator::Next() {
+  DCHECK(!Done());
+  count_ = 0;
+}
+
+void SingleSampleIterator::Get(HistogramBase::Sample* min,
+                               HistogramBase::Sample* max,
+                               HistogramBase::Count* count) const {
+  DCHECK(!Done());
+  if (min != nullptr)
+    *min = min_;
+  if (max != nullptr)
+    *max = max_;
+  if (count != nullptr)
+    *count = count_;
+}
+
+bool SingleSampleIterator::GetBucketIndex(size_t* index) const {
+  DCHECK(!Done());
+  if (bucket_index_ == kSizeMax)
+    return false;
+  *index = bucket_index_;
+  return true;
+}
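
A short usage sketch for the new iterator (values made up, not from the patch): it exposes exactly one bucket and becomes Done() after a single Next():

  SingleSampleIterator it(/*min=*/7, /*max=*/8, /*count=*/3);
  HistogramBase::Sample min;
  HistogramBase::Sample max;
  HistogramBase::Count count;
  while (!it.Done()) {
    it.Get(&min, &max, &count);  // min == 7, max == 8, count == 3
    it.Next();                   // only one entry, so the iterator is now Done()
  }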
+
}  // namespace base
