// Copyright 2012 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/metrics/histogram_samples.h"

#include <limits>
#include <string_view>
#include <utility>

#include "base/compiler_specific.h"
#include "base/memory/raw_ptr.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/clamped_math.h"
#include "base/numerics/safe_conversions.h"
#include "base/numerics/safe_math.h"
#include "base/pickle.h"
#include "base/strings/strcat.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"

namespace base {

namespace {

// A shorthand constant for the max value of size_t.
constexpr size_t kSizeMax = std::numeric_limits<size_t>::max();

// A constant stored in an AtomicSingleSample (as_atomic) to indicate that the
// sample is "disabled" and no further accumulation should be done with it. The
// value is chosen such that it will be MAX_UINT16 for both |bucket| & |count|,
// and thus less likely to conflict with real use. Conflicts are explicitly
// handled in the code but it's worth making them as unlikely as possible.
constexpr int32_t kDisabledSingleSample = -1;

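// Reads back the (min, max, count) triples that HistogramSamples::Serialize()
// writes into a Pickle, advancing |iter| past one triple on each Next().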
class SampleCountPickleIterator : public SampleCountIterator {
 public:
  explicit SampleCountPickleIterator(PickleIterator* iter);

  bool Done() const override;
  void Next() override;
  void Get(HistogramBase::Sample* min,
           int64_t* max,
           HistogramBase::Count* count) override;

 private:
  const raw_ptr<PickleIterator> iter_;

  HistogramBase::Sample min_;
  int64_t max_;
  HistogramBase::Count count_;
  bool is_done_;
};

SampleCountPickleIterator::SampleCountPickleIterator(PickleIterator* iter)
    : iter_(iter),
      is_done_(false) {
  Next();
}

bool SampleCountPickleIterator::Done() const {
  return is_done_;
}

void SampleCountPickleIterator::Next() {
  DCHECK(!Done());
  if (!iter_->ReadInt(&min_) || !iter_->ReadInt64(&max_) ||
      !iter_->ReadInt(&count_)) {
    is_done_ = true;
  }
}

void SampleCountPickleIterator::Get(HistogramBase::Sample* min,
                                    int64_t* max,
                                    HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = min_;
  *max = max_;
  *count = count_;
}

}  // namespace

static_assert(sizeof(HistogramSamples::AtomicSingleSample) ==
                  sizeof(subtle::Atomic32),
              "AtomicSingleSample isn't 32 bits");
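// Note: SingleSample (declared in histogram_samples.h) packs a 16-bit bucket
// index and a 16-bit count, which is what lets a whole sample fit in the
// single 32-bit atomic word checked above.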

HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Load()
    const {
  AtomicSingleSample single_sample(subtle::Acquire_Load(&as_atomic));

  // If the sample was extracted/disabled, it's still zero to the outside.
  if (single_sample.as_atomic == kDisabledSingleSample)
    single_sample.as_atomic = 0;

  return single_sample.as_parts;
}

HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Extract(
    AtomicSingleSample new_value) {
  DCHECK(new_value.as_atomic != kDisabledSingleSample)
      << "Disabling an AtomicSingleSample should be done through "
         "ExtractAndDisable().";

  AtomicSingleSample old_value;

  // Because a concurrent call may modify and/or disable this object as we are
  // trying to extract its value, a compare-and-swap loop must be done to ensure
  // that the value was not changed between the reading and writing (and to
  // prevent accidentally re-enabling this object).
  while (true) {
    old_value.as_atomic = subtle::Acquire_Load(&as_atomic);

    // If this object was already disabled, return an empty sample and keep it
    // disabled.
    if (old_value.as_atomic == kDisabledSingleSample) {
      old_value.as_atomic = 0;
      return old_value.as_parts;
    }

    // Extract the single-sample from memory. |existing| is what was in that
    // memory location at the time of the call; if it doesn't match |old_value|
    // (i.e., the single-sample was concurrently modified during this
    // iteration), then the swap did not happen, so try again.
    subtle::Atomic32 existing = subtle::Release_CompareAndSwap(
        &as_atomic, old_value.as_atomic, new_value.as_atomic);
    if (existing == old_value.as_atomic) {
      return old_value.as_parts;
    }
  }
}

HistogramSamples::SingleSample
HistogramSamples::AtomicSingleSample::ExtractAndDisable() {
  AtomicSingleSample old_value(
      subtle::NoBarrier_AtomicExchange(&as_atomic, kDisabledSingleSample));
  // If this object was already disabled, return an empty sample.
  if (old_value.as_atomic == kDisabledSingleSample) {
    old_value.as_atomic = 0;
  }
  return old_value.as_parts;
}

bool HistogramSamples::AtomicSingleSample::Accumulate(
    size_t bucket,
    HistogramBase::Count count) {
  if (count == 0)
    return true;

  // Convert the parameters to 16-bit variables because it's all 16-bit below.
  // To support decrements/subtractions, divide the |count| into sign/value and
  // do the proper operation below. The alternative is to change the single-
  // sample's count to be a signed integer (int16_t) and just add an int16_t
  // |count16| but that is somewhat wasteful given that the single-sample is
  // never expected to have a count less than zero.
  if (count < -std::numeric_limits<uint16_t>::max() ||
      count > std::numeric_limits<uint16_t>::max() ||
      bucket > std::numeric_limits<uint16_t>::max()) {
    return false;
  }
  bool count_is_negative = count < 0;
  uint16_t count16 = static_cast<uint16_t>(count_is_negative ? -count : count);
  uint16_t bucket16 = static_cast<uint16_t>(bucket);

  // A local, unshared copy of the single-sample is necessary so the parts
  // can be manipulated without worrying about atomicity.
  AtomicSingleSample single_sample;

  bool sample_updated;
  do {
    subtle::Atomic32 original = subtle::Acquire_Load(&as_atomic);
    if (original == kDisabledSingleSample)
      return false;
    single_sample.as_atomic = original;
    if (single_sample.as_atomic != 0) {
      // Only the same bucket (parameter and stored) can be counted multiple
      // times.
      if (single_sample.as_parts.bucket != bucket16)
        return false;
    } else {
      // The |single_sample| was zero, so it takes the |bucket| parameter, the
      // contents of which were checked above to fit in 16 bits.
      single_sample.as_parts.bucket = bucket16;
    }

    // Update count, making sure that it doesn't overflow.
    CheckedNumeric<uint16_t> new_count(single_sample.as_parts.count);
    if (count_is_negative)
      new_count -= count16;
    else
      new_count += count16;
    if (!new_count.AssignIfValid(&single_sample.as_parts.count))
      return false;

    // Don't let this become equivalent to the "disabled" value.
    if (single_sample.as_atomic == kDisabledSingleSample)
      return false;

    // Store the updated single-sample back into memory. |existing| is what
    // was in that memory location at the time of the call; if it doesn't
    // match |original| then the swap didn't happen so loop again.
    subtle::Atomic32 existing = subtle::Release_CompareAndSwap(
        &as_atomic, original, single_sample.as_atomic);
    sample_updated = (existing == original);
  } while (!sample_updated);

  return true;
}

bool HistogramSamples::AtomicSingleSample::IsDisabled() const {
  return subtle::Acquire_Load(&as_atomic) == kDisabledSingleSample;
}

HistogramSamples::LocalMetadata::LocalMetadata() {
  // This is the same way it's done for persistent metadata since no ctor
  // is called for the data members in that case.
  memset(this, 0, sizeof(*this));
}

HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta)
    : meta_(meta) {
  DCHECK(meta_->id == 0 || meta_->id == id);

  // It's possible that |meta| is contained in initialized, read-only memory
  // so it's essential that no write be done in that case.
  if (!meta_->id)
    meta_->id = id;
}

HistogramSamples::HistogramSamples(uint64_t id, std::unique_ptr<Metadata> meta)
    : HistogramSamples(id, meta.get()) {
  meta_owned_ = std::move(meta);
}

// This mustn't do anything with |meta_|. It was passed to the ctor and may
// be invalid by the time this dtor gets called.
HistogramSamples::~HistogramSamples() = default;

void HistogramSamples::Add(const HistogramSamples& other) {
  IncreaseSumAndCount(other.sum(), other.redundant_count());
  std::unique_ptr<SampleCountIterator> it = other.Iterator();
  bool success = AddSubtractImpl(it.get(), ADD);
  DCHECK(success);
}

bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
  int64_t sum;
  HistogramBase::Count redundant_count;

  if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count))
    return false;

  IncreaseSumAndCount(sum, redundant_count);

  SampleCountPickleIterator pickle_iter(iter);
  return AddSubtractImpl(&pickle_iter, ADD);
}

void HistogramSamples::Subtract(const HistogramSamples& other) {
  IncreaseSumAndCount(-other.sum(), -other.redundant_count());
  std::unique_ptr<SampleCountIterator> it = other.Iterator();
  bool success = AddSubtractImpl(it.get(), SUBTRACT);
  DCHECK(success);
}

void HistogramSamples::Extract(HistogramSamples& other) {
  static_assert(sizeof(other.meta_->sum) == 8);

#ifdef ARCH_CPU_64_BITS
  // NoBarrier_AtomicExchange() is only defined for 64-bit types if
  // the ARCH_CPU_64_BITS macro is set.
  subtle::Atomic64 other_sum =
      subtle::NoBarrier_AtomicExchange(&other.meta_->sum, 0);
#else
  // |sum| is only atomic on 64 bit archs. Make |other_sum| volatile so that
  // the following code is not optimized or rearranged to be something like:
  //     IncreaseSumAndCount(other.meta_->sum, ...);
  //     other.meta_->sum = 0;
  // Or:
  //     int64_t other_sum = other.meta_->sum;
  //     other.meta_->sum = 0;
  //     IncreaseSumAndCount(other_sum, ...);
  // Which do not guarantee eventual consistency anymore (other.meta_->sum may
  // be modified concurrently at any time). However, despite this, eventual
  // consistency is still not guaranteed here because performing 64-bit
  // operations (loading, storing, adding, etc.) on a 32-bit machine cannot be
  // done atomically, but this at least reduces the odds of inconsistencies, at
  // the cost of a few extra instructions.
  volatile int64_t other_sum = other.meta_->sum;
  other.meta_->sum -= other_sum;
#endif  // ARCH_CPU_64_BITS
  HistogramBase::AtomicCount other_redundant_count =
      subtle::NoBarrier_AtomicExchange(&other.meta_->redundant_count, 0);
  IncreaseSumAndCount(other_sum, other_redundant_count);
  std::unique_ptr<SampleCountIterator> it = other.ExtractingIterator();
  bool success = AddSubtractImpl(it.get(), ADD);
  DCHECK(success);
}

bool HistogramSamples::IsDefinitelyEmpty() const {
  return sum() == 0 && redundant_count() == 0;
}

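// The serialized layout written below (and read back by AddFromPickle() via
// SampleCountPickleIterator above) is, roughly:
//   int64 sum, int redundant_count,
//   then one (int min, int64 max, int count) triple per bucket returned by
//   Iterator().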
void HistogramSamples::Serialize(Pickle* pickle) const {
  pickle->WriteInt64(sum());
  pickle->WriteInt(redundant_count());

  HistogramBase::Sample min;
  int64_t max;
  HistogramBase::Count count;
  for (std::unique_ptr<SampleCountIterator> it = Iterator(); !it->Done();
       it->Next()) {
    it->Get(&min, &max, &count);
    pickle->WriteInt(min);
    pickle->WriteInt64(max);
    pickle->WriteInt(count);
  }
}

bool HistogramSamples::AccumulateSingleSample(HistogramBase::Sample value,
                                              HistogramBase::Count count,
                                              size_t bucket) {
  if (single_sample().Accumulate(bucket, count)) {
    // Success. Update the (separate) sum and redundant-count.
    IncreaseSumAndCount(strict_cast<int64_t>(value) * count, count);
    return true;
  }
  return false;
}

void HistogramSamples::IncreaseSumAndCount(int64_t sum,
                                           HistogramBase::Count count) {
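  // Note: on 32-bit architectures the 64-bit |sum| update below is a plain,
  // non-atomic read-modify-write; see the longer discussion in Extract() above.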
#ifdef ARCH_CPU_64_BITS
  subtle::NoBarrier_AtomicIncrement(&meta_->sum, sum);
#else
  meta_->sum += sum;
#endif
  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, count);
}

void HistogramSamples::RecordNegativeSample(NegativeSampleReason reason,
                                            HistogramBase::Count increment) {
  UMA_HISTOGRAM_ENUMERATION("UMA.NegativeSamples.Reason", reason,
                            MAX_NEGATIVE_SAMPLE_REASONS);
  UMA_HISTOGRAM_CUSTOM_COUNTS("UMA.NegativeSamples.Increment", increment, 1,
                              1 << 30, 100);
  UmaHistogramSparse("UMA.NegativeSamples.Histogram",
                     static_cast<int32_t>(id()));
}

base::Value::Dict HistogramSamples::ToGraphDict(std::string_view histogram_name,
                                                int32_t flags) const {
  base::Value::Dict dict;
  dict.Set("name", histogram_name);
  dict.Set("header", GetAsciiHeader(histogram_name, flags));
  dict.Set("body", GetAsciiBody());
  return dict;
}

std::string HistogramSamples::GetAsciiHeader(std::string_view histogram_name,
                                             int32_t flags) const {
  std::string output;
  StrAppend(&output, {"Histogram: ", histogram_name, " recorded ",
                      NumberToString(TotalCount()), " samples"});
  if (flags)
    StringAppendF(&output, " (flags = 0x%x)", flags);
  return output;
}

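// Renders one line per bucket, roughly of the form (values illustrative):
//   37  ------------------------------------O  (245 = 42.3%)
// i.e. the bucket minimum, a bar scaled against the largest bucket count, then
// the raw count and its percentage of the total.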
std::string HistogramSamples::GetAsciiBody() const {
  HistogramBase::Count total_count = TotalCount();
  double scaled_total_count = total_count / 100.0;

  // Determine how wide the largest bucket range is (how many digits to print),
  // so that we'll be able to right-align starts for the graphical bars.
  // Determine which bucket has the largest sample count so that we can
  // normalize the graphical bar-width relative to that sample count.
  HistogramBase::Count largest_count = 0;
  HistogramBase::Sample largest_sample = 0;
  std::unique_ptr<SampleCountIterator> it = Iterator();
  while (!it->Done()) {
    HistogramBase::Sample min;
    int64_t max;
    HistogramBase::Count count;
    it->Get(&min, &max, &count);
    if (min > largest_sample)
      largest_sample = min;
    if (count > largest_count)
      largest_count = count;
    it->Next();
  }
  // Scale histogram bucket counts to take at most 72 characters.
  // Note: Keep in sync with kLineLength in sample_vector.cc.
  const double kLineLength = 72;
  double scaling_factor = 1;
  if (largest_count > kLineLength)
    scaling_factor = kLineLength / largest_count;
  size_t print_width = GetSimpleAsciiBucketRange(largest_sample).size() + 1;

  // Iterate over each item and display it.
  it = Iterator();
  std::string output;
  while (!it->Done()) {
    HistogramBase::Sample min;
    int64_t max;
    HistogramBase::Count count;
    it->Get(&min, &max, &count);

    // The value is |min|, so display it.
    std::string range = GetSimpleAsciiBucketRange(min);
    output.append(range);
    if (const auto range_size = range.size(); print_width >= range_size) {
      output.append(print_width + 1 - range_size, ' ');
    }
    HistogramBase::Count current_size = round(count * scaling_factor);
    WriteAsciiBucketGraph(current_size, kLineLength, &output);
    WriteAsciiBucketValue(count, scaled_total_count, &output);
    output.append(1, '\n');
    it->Next();
  }
  return output;
}

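// For example (illustrative), x_count == 4 with line_length == 10 produces
// "----O      ": |x_count| dashes, an 'O' marker, then spaces padding the bar
// out to line_length + 1 characters.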
// static
void HistogramSamples::WriteAsciiBucketGraph(double x_count,
                                             int line_length,
                                             std::string* output) {
  output->reserve(ClampAdd(output->size(), ClampAdd(line_length, 1)));

  const size_t count = ClampRound<size_t>(x_count);
  output->append(count, '-');
  output->append(1, 'O');
  if (const auto len = as_unsigned(line_length); count < len) {
    output->append(len - count, ' ');
  }
}

void HistogramSamples::WriteAsciiBucketValue(HistogramBase::Count current,
                                             double scaled_sum,
                                             std::string* output) const {
  StringAppendF(output, " (%d = %3.1f%%)", current, current / scaled_sum);
}

const std::string HistogramSamples::GetSimpleAsciiBucketRange(
    HistogramBase::Sample sample) const {
  return StringPrintf("%d", sample);
}

SampleCountIterator::~SampleCountIterator() = default;

bool SampleCountIterator::GetBucketIndex(size_t* index) const {
  DCHECK(!Done());
  return false;
}

SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min,
                                           int64_t max,
                                           HistogramBase::Count count,
                                           size_t bucket_index,
                                           bool value_was_extracted)
    : min_(min),
      max_(max),
      bucket_index_(bucket_index),
      count_(count),
      value_was_extracted_(value_was_extracted) {}

SingleSampleIterator::~SingleSampleIterator() {
  // Because this object may have been instantiated in such a way that the
  // samples it is holding were already extracted from the underlying data, we
  // add a DCHECK to ensure that in those cases, users of this iterator read the
  // samples, otherwise they may be lost.
  DCHECK(!value_was_extracted_ || Done());
}

bool SingleSampleIterator::Done() const {
  return count_ == 0;
}

void SingleSampleIterator::Next() {
  DCHECK(!Done());
  count_ = 0;
}

void SingleSampleIterator::Get(HistogramBase::Sample* min,
                               int64_t* max,
                               HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = min_;
  *max = max_;
  *count = count_;
}

bool SingleSampleIterator::GetBucketIndex(size_t* index) const {
  DCHECK(!Done());
  if (bucket_index_ == kSizeMax)
    return false;
  *index = bucket_index_;
  return true;
}

}  // namespace base