xref: /aosp_15_r20/art/libartbase/base/arena_allocator.h (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
#define ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>

#include "bit_utils.h"
#include "debug_stack.h"
#include "dchecked_vector.h"
#include "macros.h"
#include "memory_tool.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocReachabilityGraph,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocTypeCheckInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocWBE,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSelectGenerator,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocBitTableBuilder,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
  kArenaAllocTransaction,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy([[maybe_unused]] const ArenaAllocatorStatsImpl& other) {}
  void RecordAlloc([[maybe_unused]] size_t bytes, [[maybe_unused]] ArenaAllocKind kind) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump([[maybe_unused]] std::ostream& os,
            [[maybe_unused]] const Arena* first,
            [[maybe_unused]] ssize_t lost_bytes_adjustment) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

using ArenaAllocatorStats = ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

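// Helper base class that routes memory-tool annotations (e.g. ASan poisoning) for arena
// memory. When no memory tool is available, IsRunningOnMemoryTool() is a compile-time
// false and the Make* calls below compile away.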
class ArenaAllocatorMemoryTool {
 public:
  static constexpr bool IsRunningOnMemoryTool() { return kMemoryToolIsAvailable; }

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};

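// A single contiguous chunk of memory handed out by an ArenaPool. Arenas form a singly
// linked list via next_ and are carved up by the allocators below rather than used directly.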
class Arena {
 public:
  Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {}

  virtual ~Arena() { }
  // Reset is for pre-use and uses memset for performance.
  void Reset();
  // Release is used in between uses and uses madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() const {
    return memory_;
  }

  uint8_t* End() const { return memory_ + size_; }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const { return memory_ <= ptr && ptr < memory_ + size_; }

  Arena* Next() const { return next_; }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class MallocArenaPool;
  friend class MemMapArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

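// Abstract pool that hands out Arenas and takes back whole arena chains; concrete pools
// (e.g. MallocArenaPool, MemMapArenaPool) own the underlying memory.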
class ArenaPool {
 public:
  virtual ~ArenaPool() = default;

  virtual Arena* AllocArena(size_t size) = 0;
  virtual void FreeArenaChain(Arena* first) = 0;
  virtual size_t GetBytesAllocated() const = 0;
  virtual void ReclaimMemory() = 0;
  virtual void LockReclaimMemory() = 0;
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage.
  virtual void TrimMaps() = 0;

 protected:
  ArenaPool() = default;

 private:
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
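//
// A minimal usage sketch (illustrative only; MallocArenaPool is declared in
// malloc_arena_pool.h, not in this header):
//
//   MallocArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   int32_t* counts = allocator.AllocArray<int32_t>(16, kArenaAllocMisc);  // Zeroed memory.
//   void* raw = allocator.Alloc(64);  // Zeroed, kAlignment (8-byte) aligned.
//   // No explicit free; everything goes back to `pool` when `allocator` is destroyed.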
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
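  // Example (sketch): the arena containers in arena_containers.h, such as ArenaVector<T>,
  // are the usual consumers of this adapter:
  //
  //   ArenaVector<uint32_t> worklist(allocator.Adapter(kArenaAllocGraphBuilder));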

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      // Red zone prevents end == ptr_ (unless input = allocator state = null).
      DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check the remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }
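  // Example (sketch): growing the most recent allocation extends it in place; otherwise the
  // data is copied to a fresh block and the old block is only reclaimed with the allocator.
  //
  //   uint8_t* buf = static_cast<uint8_t*>(allocator.Alloc(16));
  //   buf = static_cast<uint8_t*>(allocator.Realloc(buf, /*ptr_size=*/ 16, /*new_size=*/ 64));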

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // BytesUsed() sums up the bytes allocated from all arenas in the arena_head_ chain.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  Arena* GetHeadArena() const {
    return arena_head_;
  }

  uint8_t* CurrentPtr() const {
    return ptr_;
  }

  size_t CurrentArenaUnusedBytes() const {
    DCHECK_LE(ptr_, end_);
    return end_ - ptr_;
  }
  // Resets the current arena in use, which will force us to get a new arena
  // on next allocation.
  void ResetCurrentArena();

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

  // Extra bytes required by the memory tool.
  static constexpr size_t kMemoryToolRedZoneBytes = 8u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_