1 // Copyright 2023 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef PARTITION_ALLOC_PARTITION_FREELIST_ENTRY_H_
6 #define PARTITION_ALLOC_PARTITION_FREELIST_ENTRY_H_
7
#include <cstddef>
#include <cstdint>
#include <type_traits>

#include "partition_alloc/partition_alloc_base/bits.h"
#include "partition_alloc/partition_alloc_base/compiler_specific.h"
#include "partition_alloc/partition_alloc_base/component_export.h"
#include "partition_alloc/partition_alloc_base/no_destructor.h"
#include "partition_alloc/partition_alloc_buildflags.h"
#include "partition_alloc/partition_alloc_constants.h"
16
namespace partition_alloc::internal {

// Crash handler invoked when freelist corruption is detected. `slot_size`
// identifies the affected bucket for diagnosis. [[noreturn]] — never comes
// back; PA_NOINLINE keeps this cold path out of the (hot) callers' bodies.
[[noreturn]] PA_NOINLINE PA_COMPONENT_EXPORT(
    PARTITION_ALLOC) void FreelistCorruptionDetected(size_t slot_size);

}  // namespace partition_alloc::internal
23
24 #include "partition_alloc/encoded_next_freelist.h" // IWYU pragma: export
25
26 // PA defaults to a freelist whose "next" links are encoded pointers.
27 // We are assessing an alternate implementation using an alternate
28 // encoding (pool offsets). When build support is enabled, the
29 // freelist implementation is determined at runtime.
30 #if BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
31 #include "partition_alloc/pool_offset_freelist.h" // IWYU pragma: export
32 #endif // BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
33
34 namespace partition_alloc::internal {
35
// Assertions that are agnostic to the implementation of the freelist.

// Freelist entries live in-place inside freed slots, so even the smallest
// bucket must be able to hold one entry of each compiled-in implementation.
static_assert(kSmallestBucket >= sizeof(EncodedNextFreelistEntry),
              "Need enough space for freelist entries in the smallest slot");
#if BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
static_assert(kSmallestBucket >= sizeof(PoolOffsetFreelistEntry),
              "Need enough space for freelist entries in the smallest slot");
#endif  // BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
44
// Selects which freelist-entry implementation a dispatcher delegates to.
enum class PartitionFreelistEncoding {
  // "next" links are encoded pointers (EncodedNextFreelistEntry).
  kEncodedFreeList,
  // "next" links are pool offsets (PoolOffsetFreelistEntry).
  kPoolOffsetFreeList,
};
49
#if BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
// When both implementations are compiled in, `PartitionFreelistEntry` acts as
// an opaque handle that may refer to either concrete entry type; which one is
// active is decided at runtime by the selected `PartitionFreelistDispatcher`.
union PartitionFreelistEntry {
  EncodedNextFreelistEntry encoded_entry_;
  PoolOffsetFreelistEntry pool_offset_entry_;
};
#else
// Only the encoded-pointer implementation exists; no indirection is needed.
using PartitionFreelistEntry = EncodedNextFreelistEntry;
#endif  // BUILDFLAG(USE_FREELIST_POOL_OFFSETS)

#if BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
// Both union members must sit at offset 0 so a `PartitionFreelistEntry*` can
// be reinterpret_cast directly to either concrete entry type.
static_assert(offsetof(PartitionFreelistEntry, encoded_entry_) == 0ull);
static_assert(offsetof(PartitionFreelistEntry, pool_offset_entry_) == 0ull);
#endif  // BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
63
// Dispatches freelist operations to the active entry implementation.
//
// When USE_FREELIST_POOL_OFFSETS is enabled, this is an abstract interface
// whose concrete implementation (`PartitionFreelistDispatcherImpl`) is chosen
// at runtime via `Create()`. Otherwise it degenerates to a thin non-virtual
// wrapper that forwards straight to `EncodedNextFreelistEntry`, so the
// default configuration pays no virtual-dispatch cost.
struct PartitionFreelistDispatcher {
#if BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
  // Returns the dispatcher singleton matching `encoding`.
  static const PartitionFreelistDispatcher* Create(
      PartitionFreelistEncoding encoding);

  // Writes a freelist entry with a null "next" link at the slot start.
  PA_ALWAYS_INLINE virtual PartitionFreelistEntry* EmplaceAndInitNull(
      void* slot_start_tagged) const = 0;
  PA_ALWAYS_INLINE virtual PartitionFreelistEntry* EmplaceAndInitNull(
      uintptr_t slot_start) const = 0;
  // Writes an entry pointing at `next` (thread-cache flavor).
  PA_ALWAYS_INLINE virtual PartitionFreelistEntry* EmplaceAndInitForThreadCache(
      uintptr_t slot_start,
      PartitionFreelistEntry* next) const = 0;
  PA_ALWAYS_INLINE virtual void EmplaceAndInitForTest(
      uintptr_t slot_start,
      void* next,
      bool make_shadow_match) const = 0;
  PA_ALWAYS_INLINE virtual void CorruptNextForTesting(
      PartitionFreelistEntry* entry,
      uintptr_t v) const = 0;
  // The True/False/Bool trio exists because virtual functions cannot be
  // templated on `crash_on_corruption`; the non-virtual branch below exposes
  // a template instead.
  PA_ALWAYS_INLINE virtual PartitionFreelistEntry* GetNextForThreadCacheTrue(
      PartitionFreelistEntry* entry,
      size_t slot_size) const = 0;
  PA_ALWAYS_INLINE virtual PartitionFreelistEntry* GetNextForThreadCacheFalse(
      PartitionFreelistEntry* entry,
      size_t slot_size) const = 0;
  PA_ALWAYS_INLINE virtual PartitionFreelistEntry* GetNextForThreadCacheBool(
      PartitionFreelistEntry* entry,
      bool crash_on_corruption,
      size_t slot_size) const = 0;
  PA_ALWAYS_INLINE virtual PartitionFreelistEntry* GetNext(
      PartitionFreelistEntry* entry,
      size_t slot_size) const = 0;
  PA_NOINLINE virtual void CheckFreeList(PartitionFreelistEntry* entry,
                                         size_t slot_size) const = 0;
  PA_NOINLINE virtual void CheckFreeListForThreadCache(
      PartitionFreelistEntry* entry,
      size_t slot_size) const = 0;
  PA_ALWAYS_INLINE virtual void SetNext(PartitionFreelistEntry* entry,
                                        PartitionFreelistEntry* next) const = 0;
  // Returns the slot address and wipes the entry for reuse as user data.
  PA_ALWAYS_INLINE virtual uintptr_t ClearForAllocation(
      PartitionFreelistEntry* entry) const = 0;
  PA_ALWAYS_INLINE virtual constexpr bool IsEncodedNextPtrZero(
      PartitionFreelistEntry* entry) const = 0;

  virtual ~PartitionFreelistDispatcher() = default;
#else
  // Single stateless instance; constinit guarantees constant initialization
  // (no dynamic-init guard on this hot path).
  static const PartitionFreelistDispatcher* Create(
      PartitionFreelistEncoding encoding) {
    static constinit PartitionFreelistDispatcher dispatcher =
        PartitionFreelistDispatcher();
    return &dispatcher;
  }

  // In this configuration `PartitionFreelistEntry` is an alias of
  // `EncodedNextFreelistEntry`, so the reinterpret_casts below are identity
  // conversions kept for symmetry with the virtual branch.
  PA_ALWAYS_INLINE PartitionFreelistEntry* EmplaceAndInitNull(
      void* slot_start_tagged) const {
    return reinterpret_cast<PartitionFreelistEntry*>(
        EncodedNextFreelistEntry::EmplaceAndInitNull(slot_start_tagged));
  }

  PA_ALWAYS_INLINE PartitionFreelistEntry* EmplaceAndInitNull(
      uintptr_t slot_start) const {
    return reinterpret_cast<PartitionFreelistEntry*>(
        EncodedNextFreelistEntry::EmplaceAndInitNull(slot_start));
  }

  PA_ALWAYS_INLINE PartitionFreelistEntry* EmplaceAndInitForThreadCache(
      uintptr_t slot_start,
      PartitionFreelistEntry* next) const {
    return reinterpret_cast<PartitionFreelistEntry*>(
        EncodedNextFreelistEntry::EmplaceAndInitForThreadCache(slot_start,
                                                               next));
  }

  PA_ALWAYS_INLINE void EmplaceAndInitForTest(uintptr_t slot_start,
                                              void* next,
                                              bool make_shadow_match) const {
    return EncodedNextFreelistEntry::EmplaceAndInitForTest(slot_start, next,
                                                           make_shadow_match);
  }

  PA_ALWAYS_INLINE void CorruptNextForTesting(PartitionFreelistEntry* entry,
                                              uintptr_t v) const {
    return entry->CorruptNextForTesting(v);
  }

  // Template form of the *True/*False virtual pair above; call sites are
  // presumably conditioned on the same buildflag — verify when changing.
  template <bool crash_on_corruption>
  PA_ALWAYS_INLINE PartitionFreelistEntry* GetNextForThreadCache(
      PartitionFreelistEntry* entry,
      size_t slot_size) const {
    return reinterpret_cast<PartitionFreelistEntry*>(
        entry->GetNextForThreadCache<crash_on_corruption>(slot_size));
  }

  PA_ALWAYS_INLINE PartitionFreelistEntry* GetNext(
      PartitionFreelistEntry* entry,
      size_t slot_size) const {
    return reinterpret_cast<PartitionFreelistEntry*>(entry->GetNext(slot_size));
  }

  PA_NOINLINE void CheckFreeList(PartitionFreelistEntry* entry,
                                 size_t slot_size) const {
    return entry->CheckFreeList(slot_size);
  }

  PA_NOINLINE void CheckFreeListForThreadCache(PartitionFreelistEntry* entry,
                                               size_t slot_size) const {
    return entry->CheckFreeListForThreadCache(slot_size);
  }

  PA_ALWAYS_INLINE void SetNext(PartitionFreelistEntry* entry,
                                PartitionFreelistEntry* next) const {
    return entry->SetNext(next);
  }

  PA_ALWAYS_INLINE uintptr_t
  ClearForAllocation(PartitionFreelistEntry* entry) const {
    return entry->ClearForAllocation();
  }

  PA_ALWAYS_INLINE constexpr bool IsEncodedNextPtrZero(
      PartitionFreelistEntry* entry) const {
    return entry->IsEncodedNextPtrZero();
  }

  ~PartitionFreelistDispatcher() = default;
#endif  // BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
};
191
#if BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
// Concrete dispatcher: forwards every `PartitionFreelistDispatcher` operation
// to the entry implementation selected by `encoding`. Each override
// reinterpret_casts the opaque `PartitionFreelistEntry*` to the concrete
// entry type — valid because both union members are at offset 0 (enforced by
// static_asserts earlier in this file) — and delegates.
template <PartitionFreelistEncoding encoding>
struct PartitionFreelistDispatcherImpl : PartitionFreelistDispatcher {
  // The concrete entry type this instantiation delegates to.
  using Entry =
      std::conditional_t<encoding ==
                             PartitionFreelistEncoding::kEncodedFreeList,
                         EncodedNextFreelistEntry,
                         PoolOffsetFreelistEntry>;

  // `entry` can be passed in as `nullptr`
  Entry* GetEntryImpl(PartitionFreelistEntry* entry) const {
    return reinterpret_cast<Entry*>(entry);
  }

  PA_ALWAYS_INLINE PartitionFreelistEntry* EmplaceAndInitNull(
      void* slot_start_tagged) const override {
    return reinterpret_cast<PartitionFreelistEntry*>(
        Entry::EmplaceAndInitNull(slot_start_tagged));
  }

  PA_ALWAYS_INLINE PartitionFreelistEntry* EmplaceAndInitNull(
      uintptr_t slot_start) const override {
    return reinterpret_cast<PartitionFreelistEntry*>(
        Entry::EmplaceAndInitNull(slot_start));
  }

  // `next` can be passed in as `nullptr`
  PA_ALWAYS_INLINE PartitionFreelistEntry* EmplaceAndInitForThreadCache(
      uintptr_t slot_start,
      PartitionFreelistEntry* next) const override {
    return reinterpret_cast<PartitionFreelistEntry*>(
        Entry::EmplaceAndInitForThreadCache(slot_start, GetEntryImpl(next)));
  }

  PA_ALWAYS_INLINE void EmplaceAndInitForTest(
      uintptr_t slot_start,
      void* next,
      bool make_shadow_match) const override {
    return Entry::EmplaceAndInitForTest(slot_start, next, make_shadow_match);
  }

  PA_ALWAYS_INLINE void CorruptNextForTesting(PartitionFreelistEntry* entry,
                                              uintptr_t v) const override {
    return GetEntryImpl(entry)->CorruptNextForTesting(v);
  }

  // True/False overrides pin the `crash_on_corruption` template argument,
  // since a virtual function itself cannot be a template.
  PA_ALWAYS_INLINE PartitionFreelistEntry* GetNextForThreadCacheTrue(
      PartitionFreelistEntry* entry,
      size_t slot_size) const override {
    return reinterpret_cast<PartitionFreelistEntry*>(
        GetEntryImpl(entry)->template GetNextForThreadCache<true>(slot_size));
  }

  PA_ALWAYS_INLINE PartitionFreelistEntry* GetNextForThreadCacheFalse(
      PartitionFreelistEntry* entry,
      size_t slot_size) const override {
    return reinterpret_cast<PartitionFreelistEntry*>(
        GetEntryImpl(entry)->template GetNextForThreadCache<false>(slot_size));
  }

  // Runtime-boolean convenience wrapper over the two fixed variants above.
  PA_ALWAYS_INLINE PartitionFreelistEntry* GetNextForThreadCacheBool(
      PartitionFreelistEntry* entry,
      bool crash_on_corruption,
      size_t slot_size) const override {
    if (crash_on_corruption) {
      return GetNextForThreadCacheTrue(entry, slot_size);
    } else {
      return GetNextForThreadCacheFalse(entry, slot_size);
    }
  }

  PA_ALWAYS_INLINE PartitionFreelistEntry* GetNext(
      PartitionFreelistEntry* entry,
      size_t slot_size) const override {
    return reinterpret_cast<PartitionFreelistEntry*>(
        GetEntryImpl(entry)->GetNext(slot_size));
  }

  PA_NOINLINE void CheckFreeList(PartitionFreelistEntry* entry,
                                 size_t slot_size) const override {
    return GetEntryImpl(entry)->CheckFreeList(slot_size);
  }

  PA_NOINLINE void CheckFreeListForThreadCache(
      PartitionFreelistEntry* entry,
      size_t slot_size) const override {
    return GetEntryImpl(entry)->CheckFreeListForThreadCache(slot_size);
  }

  // `next` can be passed in as `nullptr`
  PA_ALWAYS_INLINE void SetNext(PartitionFreelistEntry* entry,
                                PartitionFreelistEntry* next) const override {
    return GetEntryImpl(entry)->SetNext(GetEntryImpl(next));
  }

  PA_ALWAYS_INLINE uintptr_t
  ClearForAllocation(PartitionFreelistEntry* entry) const override {
    return GetEntryImpl(entry)->ClearForAllocation();
  }

  PA_ALWAYS_INLINE constexpr bool IsEncodedNextPtrZero(
      PartitionFreelistEntry* entry) const override {
    return GetEntryImpl(entry)->IsEncodedNextPtrZero();
  }
};
297
298 PA_ALWAYS_INLINE const PartitionFreelistDispatcher*
Create(PartitionFreelistEncoding encoding)299 PartitionFreelistDispatcher::Create(PartitionFreelistEncoding encoding) {
300 switch (encoding) {
301 case PartitionFreelistEncoding::kEncodedFreeList: {
302 static base::NoDestructor<PartitionFreelistDispatcherImpl<
303 PartitionFreelistEncoding::kEncodedFreeList>>
304 encoded_impl;
305 return encoded_impl.get();
306 }
307 case PartitionFreelistEncoding::kPoolOffsetFreeList: {
308 static base::NoDestructor<PartitionFreelistDispatcherImpl<
309 PartitionFreelistEncoding::kPoolOffsetFreeList>>
310 pool_offset_impl;
311 return pool_offset_impl.get();
312 }
313 }
314 }
#endif  // BUILDFLAG(USE_FREELIST_POOL_OFFSETS)
316 } // namespace partition_alloc::internal
317
318 #endif // PARTITION_ALLOC_PARTITION_FREELIST_ENTRY_H_
319