/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>

#include "mman.h"
namespace art {

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "Reachability ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "TypeCheckIns ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSA          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "WBE          ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SelectGen    ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "BitTableBld  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
  "SBCloner     ",
  "Transaction  ",
};
99 
100 template <bool kCount>
ArenaAllocatorStatsImpl()101 ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
102     : num_allocations_(0u),
103       alloc_stats_(kNumArenaAllocKinds, 0u) {
104 }
105 
106 template <bool kCount>
Copy(const ArenaAllocatorStatsImpl & other)107 void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
108   num_allocations_ = other.num_allocations_;
109   std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
110 }
111 
112 template <bool kCount>
RecordAlloc(size_t bytes,ArenaAllocKind kind)113 void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
114   alloc_stats_[kind] += bytes;
115   ++num_allocations_;
116 }
117 
118 template <bool kCount>
NumAllocations() const119 size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
120   return num_allocations_;
121 }
122 
123 template <bool kCount>
BytesAllocated() const124 size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
125   const size_t init = 0u;  // Initial value of the correct type.
126   return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
127 }
128 
129 template <bool kCount>
Dump(std::ostream & os,const Arena * first,ssize_t lost_bytes_adjustment) const130 void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
131                                            ssize_t lost_bytes_adjustment) const {
132   size_t malloc_bytes = 0u;
133   size_t lost_bytes = 0u;
134   size_t num_arenas = 0u;
135   for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
136     malloc_bytes += arena->Size();
137     lost_bytes += arena->RemainingSpace();
138     ++num_arenas;
139   }
140   // The lost_bytes_adjustment is used to make up for the fact that the current arena
141   // may not have the bytes_allocated_ updated correctly.
142   lost_bytes += lost_bytes_adjustment;
143   const size_t bytes_allocated = BytesAllocated();
144   os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
145      << ", lost: " << lost_bytes << "\n";
146   size_t num_allocations = NumAllocations();
147   if (num_allocations != 0) {
148     os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
149        << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
150   }
151   os << "===== Allocation by kind\n";
152   static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
153   for (int i = 0; i < kNumArenaAllocKinds; i++) {
154     // Reduce output by listing only allocation kinds that actually have allocations.
155     if (alloc_stats_[i] != 0u) {
156       os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
157     }
158   }
159 }
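
// Shape of the Dump() output above, with made-up numbers (illustrative only):
//
//    MEM: used: 12345, allocated: 16384, lost: 4039
//   Number of arenas allocated: 1, Number of allocations: 42, avg size: 293
//   ===== Allocation by kind
//   Misc              1024
//   Instruction      11321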

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop
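
// Minimal sketch of the pattern being suppressed above (hypothetical type S):
//
//   template <bool B> struct S { /* primary template */ };
//   template <> struct S<false> { /* explicit specialization */ };
//   template struct S<false>;  // No effect; Clang flags this with
//                              // -Winstantiation-after-specialization.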

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}
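
// Illustrative usage sketch (assumption: callers typically supply a concrete
// pool such as MallocArenaPool; adjust to the actual pool in use):
//
//   MallocArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* p = allocator.Alloc(64, kArenaAllocMisc);  // Arena memory is zeroed.
//     // ... use p; individual allocations are never freed ...
//   }  // ~ArenaAllocator() returns the whole arena chain to the pool.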

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}
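
// Resulting layout of one allocation under a memory tool (illustrative, not an
// exact byte map):
//
//   ret                 ret + bytes                       ret + rounded_bytes
//    |<----- defined ----->|<-- red zone + round-up padding (inaccessible) -->|
//
// Any read or write past `bytes` lands in the inaccessible region and is
// reported by the tool.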

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  uintptr_t padding =
      RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}
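
// Worked padding example (illustrative): if ptr_ currently ends in ...0x8, then
//   padding = RoundUp(0x...8, 16) - 0x...8 = 8,
// so 8 inaccessible bytes are skipped and the allocation starts 16-byte
// aligned. When ptr_ is already 16-byte aligned, padding is 0.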

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ResetCurrentArena() {
  UpdateBytesAllocated();
  begin_ = nullptr;
  ptr_ = nullptr;
  end_ = nullptr;
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}
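
// Worked example for the "keep using the old arena" branch (illustrative;
// assumes a default arena size of 128 KiB): with 60 KiB left in the current
// arena, a 100 KiB request yields a fresh 128 KiB arena with only 28 KiB of
// tail space. Since 60 KiB > 28 KiB, the current arena stays at the head and
// the new arena, marked fully allocated, is linked in right behind it.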

uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
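
// Note on the adjustment above: arena_head_->RemainingSpace() is derived from
// bytes_allocated_, which UpdateBytesAllocated() may not have refreshed yet,
// while the current arena's true remaining space is end_ - ptr_. Passing the
// difference (end_ - ptr_) - RemainingSpace() lets Dump() report accurate
// "lost" bytes without mutating the arena here.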

}  // namespace art