// Copyright 2015 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/heap_profiler_allocation_context.h"

#include <algorithm>
#include <cstring>

#include "base/containers/span.h"
#include "base/hash/hash.h"

namespace base {
namespace trace_event {

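// StackFrame ordering and equality are defined purely by the stored |value|
// pointer, so frames can be compared and deduplicated cheaply.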
bool operator < (const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value < rhs.value;
}

bool operator == (const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value == rhs.value;
}

bool operator != (const StackFrame& lhs, const StackFrame& rhs) {
  return !(lhs.value == rhs.value);
}

Backtrace::Backtrace() = default;

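// Two backtraces are equal only if they contain the same number of frames and
// the frames match pairwise.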
bool operator==(const Backtrace& lhs, const Backtrace& rhs) {
  if (lhs.frame_count != rhs.frame_count) return false;
  return std::equal(lhs.frames, lhs.frames + lhs.frame_count, rhs.frames);
}

bool operator!=(const Backtrace& lhs, const Backtrace& rhs) {
  return !(lhs == rhs);
}

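// The default context carries an empty backtrace and no type name.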
AllocationContext::AllocationContext(): type_name(nullptr) {}

AllocationContext::AllocationContext(const Backtrace& backtrace,
                                     const char* type_name)
  : backtrace(backtrace), type_name(type_name) {}

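// Note that |type_name| is compared by pointer, which assumes equal type names
// share the same static string rather than being dynamically built.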
bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) {
  return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name);
}

bool operator!=(const AllocationContext& lhs, const AllocationContext& rhs) {
  return !(lhs == rhs);
}

}  // namespace trace_event
}  // namespace base

namespace std {

using base::trace_event::AllocationContext;
using base::trace_event::Backtrace;
using base::trace_event::StackFrame;

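// These hash specializations (declared alongside the types in the header) let
// the profiler types act as keys in unordered containers, e.g. (sketch,
// illustrative names only):
//   std::unordered_map<AllocationContext, size_t> bytes_by_context;
//   bytes_by_context[context] += size;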
size_t hash<StackFrame>::operator()(const StackFrame& frame) const {
  return hash<const void*>()(frame.value.get());
}

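// Hashes the raw pointer values of the frames that are in use as one
// contiguous byte span. base::PersistentHash is stable across runs, so the
// result is suitable for persisted or cross-process comparisons.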
size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const {
  const void* values[Backtrace::kMaxFrameCount];
  for (size_t i = 0; i != backtrace.frame_count; ++i) {
    values[i] = backtrace.frames[i].value;
  }
  auto values_span = base::make_span(values).first(backtrace.frame_count);
  return base::PersistentHash(base::as_bytes(values_span));
}

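// Combines the backtrace hash with a hash derived from the type name pointer.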
size_t hash<AllocationContext>::operator()(const AllocationContext& ctx) const {
  size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace);

  // Multiplicative hash from [Knuth 1998]. Works best if |size_t| is 32 bits,
  // because the magic number is a prime very close to 2^32 / golden ratio, but
  // will still redistribute keys bijectively on 64-bit architectures because
  // the magic number is coprime to 2^64.
  size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761;

  // Multiply one side to break the commutativity of +. Multiplication with a
  // number coprime to |numeric_limits<size_t>::max() + 1| is bijective so
  // randomness is preserved.
  return (backtrace_hash * 3) + type_hash;
}

}  // namespace std