1 // Copyright 2020 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "partition_alloc/pointers/raw_ptr.h"
6
7 #include <climits>
8 #include <cstddef>
9 #include <cstdint>
10 #include <memory>
11 #include <optional>
12 #include <string>
13 #include <thread>
14 #include <type_traits>
15 #include <utility>
16
17 #include "base/allocator/partition_alloc_features.h"
18 #include "base/allocator/partition_alloc_support.h"
19 #include "base/cpu.h"
20 #include "base/logging.h"
21 #include "base/memory/raw_ptr_asan_service.h"
22 #include "base/metrics/histogram_base.h"
23 #include "base/task/thread_pool.h"
24 #include "base/test/bind.h"
25 #include "base/test/gtest_util.h"
26 #include "base/test/memory/dangling_ptr_instrumentation.h"
27 #include "base/test/scoped_feature_list.h"
28 #include "base/types/to_address.h"
29 #include "build/build_config.h"
30 #include "partition_alloc/dangling_raw_ptr_checks.h"
31 #include "partition_alloc/partition_alloc-inl.h"
32 #include "partition_alloc/partition_alloc.h"
33 #include "partition_alloc/partition_alloc_base/numerics/checked_math.h"
34 #include "partition_alloc/partition_alloc_buildflags.h"
35 #include "partition_alloc/partition_alloc_config.h"
36 #include "partition_alloc/partition_alloc_constants.h"
37 #include "partition_alloc/partition_alloc_hooks.h"
38 #include "partition_alloc/partition_root.h"
39 #include "partition_alloc/pointers/instance_tracer.h"
40 #include "partition_alloc/pointers/raw_ptr_counting_impl_for_test.h"
41 #include "partition_alloc/pointers/raw_ptr_test_support.h"
42 #include "partition_alloc/pointers/raw_ref.h"
43 #include "partition_alloc/tagging.h"
44 #include "testing/gmock/include/gmock/gmock.h"
45 #include "testing/gtest/include/gtest/gtest.h"
46 #include "third_party/abseil-cpp/absl/types/variant.h"
47
48 #if BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
49 #include <sanitizer/asan_interface.h>
50 #include "base/debug/asan_service.h"
51 #endif
52
53 using testing::AllOf;
54 using testing::Eq;
55 using testing::HasSubstr;
56 using testing::IsEmpty;
57 using testing::Ne;
58 using testing::SizeIs;
59 using testing::Test;
60
// The instance tracer has unavoidable per-instance overhead, but when disabled,
// there should be no size difference between raw_ptr<T> and T*.
#if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_INSTANCE_TRACER)
// Checked for a void, a scalar, and a non-trivial pointee type.
static_assert(sizeof(raw_ptr<void>) == sizeof(void*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<int>) == sizeof(int*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<std::string>) == sizeof(std::string*),
              "raw_ptr shouldn't add memory overhead");
#endif
71
// Trivial copyability only holds when no raw_ptr implementation that adds
// copy/move/destroy hooks (BRP, ASan, hookable, zero-on-move/destruct) is
// enabled.
#if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
    !BUILDFLAG(USE_ASAN_UNOWNED_PTR) && !BUILDFLAG(USE_HOOKABLE_RAW_PTR) && \
    !BUILDFLAG(RAW_PTR_ZERO_ON_MOVE) && !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
// |is_trivially_copyable| assertion means that arrays/vectors of raw_ptr can
// be copied by memcpy.
static_assert(std::is_trivially_copyable_v<raw_ptr<void>>,
              "raw_ptr should be trivially copyable");
static_assert(std::is_trivially_copyable_v<raw_ptr<int>>,
              "raw_ptr should be trivially copyable");
static_assert(std::is_trivially_copyable_v<raw_ptr<std::string>>,
              "raw_ptr should be trivially copyable");
#endif  // !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
        // !BUILDFLAG(USE_ASAN_UNOWNED_PTR) &&
        // !BUILDFLAG(USE_HOOKABLE_RAW_PTR) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_MOVE) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
88
// Trivial default construction only holds when no implementation that
// initializes the pointer in its constructor (BRP, ASan, hookable,
// zero-on-construct/destruct) is enabled.
#if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
    !BUILDFLAG(USE_ASAN_UNOWNED_PTR) && !BUILDFLAG(USE_HOOKABLE_RAW_PTR) && \
    !BUILDFLAG(RAW_PTR_ZERO_ON_CONSTRUCT) && \
    !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
// |is_trivially_default_constructible| assertion helps retain implicit default
// constructors when raw_ptr is used as a union field.  Example of an error
// if this assertion didn't hold:
//
//     ../../base/trace_event/trace_arguments.h:249:16: error: call to
//     implicitly-deleted default constructor of 'base::trace_event::TraceValue'
//         TraceValue ret;
//                    ^
//     ../../base/trace_event/trace_arguments.h:211:26: note: default
//     constructor of 'TraceValue' is implicitly deleted because variant field
//     'as_pointer' has a non-trivial default constructor
//       raw_ptr<const void> as_pointer;
static_assert(std::is_trivially_default_constructible_v<raw_ptr<void>>,
              "raw_ptr should be trivially default constructible");
static_assert(std::is_trivially_default_constructible_v<raw_ptr<int>>,
              "raw_ptr should be trivially default constructible");
static_assert(std::is_trivially_default_constructible_v<raw_ptr<std::string>>,
              "raw_ptr should be trivially default constructible");
#endif  // !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
        // !BUILDFLAG(USE_ASAN_UNOWNED_PTR) &&
        // !BUILDFLAG(USE_HOOKABLE_RAW_PTR) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_CONSTRUCT) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
116
// Verify that raw_ptr is a literal type, and its entire interface is constexpr.
//
// Constexpr destructors were introduced in C++20. PartitionAlloc's minimum
// supported C++ version is C++17, so raw_ptr is not a literal type in C++17.
// Thus we only test for constexpr in C++20.
//
// The lambda below deliberately touches every public operation of raw_ptr
// exactly once (each annotated with the signature it exercises), inside a
// constant-evaluated context; a compile error here means some part of the
// interface stopped being constexpr.
#if defined(__cpp_constexpr) && __cpp_constexpr >= 201907L
static_assert([]() constexpr {
  struct IntBase {};
  struct Int : public IntBase {
    int i = 0;
  };

  // Constexpr new/delete (C++20) lets us exercise real heap pointers here.
  Int* i = new Int();
  {
    raw_ptr<Int> r(i);              // raw_ptr(T*)
    raw_ptr<Int> r2(r);             // raw_ptr(const raw_ptr&)
    raw_ptr<Int> r3(std::move(r));  // raw_ptr(raw_ptr&&)
    r = r2;                         // operator=(const raw_ptr&)
    r = std::move(r3);              // operator=(raw_ptr&&)
    raw_ptr<Int, base::RawPtrTraits::kMayDangle> r4(
        r);       // raw_ptr(const raw_ptr<DifferentTraits>&)
    r4 = r2;      // operator=(const raw_ptr<DifferentTraits>&)
    // (There is no move-version of DifferentTraits.)
    [[maybe_unused]] raw_ptr<IntBase> r5(
        r2);  // raw_ptr(const raw_ptr<Convertible>&)
    [[maybe_unused]] raw_ptr<IntBase> r6(
        std::move(r2));  // raw_ptr(raw_ptr<Convertible>&&)
    r2 = r;              // Reset after move...
    r5 = r2;             // operator=(const raw_ptr<Convertible>&)
    r5 = std::move(r2);  // operator=(raw_ptr<Convertible>&&)
    [[maybe_unused]] raw_ptr<Int> r7(nullptr);  // raw_ptr(nullptr)
    r4 = nullptr;                               // operator=(nullptr)
    r4 = i;                                     // operator=(T*)
    r5 = r4;                                    // operator=(const Upcast&)
    r5 = std::move(r4);                         // operator=(Upcast&&)
    r.get()->i += 1;                            // get()
    [[maybe_unused]] bool b = r;                // operator bool
    (*r).i += 1;                                // operator*()
    r->i += 1;                                  // operator->()
    [[maybe_unused]] Int* i2 = r;               // operator T*()
    [[maybe_unused]] IntBase* i3 = r;           // operator Convertible*()

    auto func_taking_ptr_to_ptr = [](Int**) {};
    auto func_taking_ref_to_ptr = [](Int*&) {};
    func_taking_ptr_to_ptr(&r.AsEphemeralRawAddr());
    func_taking_ref_to_ptr(r.AsEphemeralRawAddr());

    // Pointer arithmetic requires the kAllowPtrArithmetic trait and a real
    // array so that the in-bounds requirement of constant evaluation holds.
    Int* array = new Int[4]();
    {
      raw_ptr<Int, base::RawPtrTraits::kAllowPtrArithmetic> ra(array);
      ++ra;      // operator++()
      --ra;      // operator--()
      ra++;      // operator++(int)
      ra--;      // operator--(int)
      ra += 1u;  // operator+=()
      ra -= 1u;  // operator-=()
      ra = ra + 1;  // operator+(raw_ptr,int)
      ra = 1 + ra;  // operator+(int,raw_ptr)
      ra = ra - 2;  // operator-(raw_ptr,int)
      [[maybe_unused]] ptrdiff_t d = ra - ra;  // operator-(raw_ptr,raw_ptr)
      d = ra - array;                          // operator-(raw_ptr,T*)
      d = array - ra;                          // operator-(T*,raw_ptr)

      ra[0] = ra[1];  // operator[]()

      b = ra < ra;      // operator<(raw_ptr,raw_ptr)
      b = ra < array;   // operator<(raw_ptr,T*)
      b = array < ra;   // operator<(T*,raw_ptr)
      b = ra <= ra;     // operator<=(raw_ptr,raw_ptr)
      b = ra <= array;  // operator<=(raw_ptr,T*)
      b = array <= ra;  // operator<=(T*,raw_ptr)
      b = ra > ra;      // operator>(raw_ptr,raw_ptr)
      b = ra > array;   // operator>(raw_ptr,T*)
      b = array > ra;   // operator>(T*,raw_ptr)
      b = ra >= ra;     // operator>=(raw_ptr,raw_ptr)
      b = ra >= array;  // operator>=(raw_ptr,T*)
      b = array >= ra;  // operator>=(T*,raw_ptr)
      b = ra == ra;     // operator==(raw_ptr,raw_ptr)
      b = ra == array;  // operator==(raw_ptr,T*)
      b = array == ra;  // operator==(T*,raw_ptr)
      b = ra != ra;     // operator!=(raw_ptr,raw_ptr)
      b = ra != array;  // operator!=(raw_ptr,T*)
      b = array != ra;  // operator!=(T*,raw_ptr)
    }
    delete[] array;
  }
  delete i;
  return true;
}());
#endif
207
// Fixtures for verifying that `kTypeTraits<T>` specializations propagate into
// `raw_ptr<T>::Traits` for T and anything derived from it.
struct StructWithoutTypeBasedTraits {};
struct BaseWithTypeBasedTraits {};
struct DerivedWithTypeBasedTraits : BaseWithTypeBasedTraits {};

namespace base::raw_ptr_traits {
// `BaseWithTypeBasedTraits` and any derived classes have
// `RawPtrTraits::kDummyForTest`.
template <typename T>
constexpr auto kTypeTraits<
    T,
    std::enable_if_t<std::is_base_of_v<BaseWithTypeBasedTraits, T>>> =
    RawPtrTraits::kDummyForTest;
}  // namespace base::raw_ptr_traits

// `raw_ptr<T>` should have traits based on specialization of `kTypeTraits<T>`.
static_assert(!ContainsFlags(raw_ptr<StructWithoutTypeBasedTraits>::Traits,
                             base::RawPtrTraits::kDummyForTest));
static_assert(ContainsFlags(raw_ptr<BaseWithTypeBasedTraits>::Traits,
                            base::RawPtrTraits::kDummyForTest));
static_assert(ContainsFlags(raw_ptr<DerivedWithTypeBasedTraits>::Traits,
                            base::RawPtrTraits::kDummyForTest));
229
230 // Don't use base::internal for testing raw_ptr API, to test if code outside
231 // this namespace calls the correct functions from this namespace.
232 namespace {
233
// Shorter name for expected test impl.
using RawPtrCountingImpl = base::test::RawPtrCountingImplForTest;

// raw_ptr variant whose backing impl counts every wrap/unwrap/compare, used by
// the CountersMatch() expectations throughout this file.
template <typename T>
using CountingRawPtr = raw_ptr<T,
                               base::RawPtrTraits::kUseCountingImplForTest |
                                   base::RawPtrTraits::kAllowPtrArithmetic>;

// Ensure that the `kUseCountingImplForTest` flag selects the test impl.
static_assert(std::is_same_v<CountingRawPtr<int>::Impl, RawPtrCountingImpl>);

// As CountingRawPtr, but additionally marked kMayDangle (the kind produced by
// ExtractAsDangling()).
template <typename T>
using CountingRawPtrMayDangle =
    raw_ptr<T,
            base::RawPtrTraits::kMayDangle |
                base::RawPtrTraits::kUseCountingImplForTest |
                base::RawPtrTraits::kAllowPtrArithmetic>;

// Ensure that the `kUseCountingImplForTest` flag selects the test impl.
static_assert(
    std::is_same_v<CountingRawPtrMayDangle<int>::Impl, RawPtrCountingImpl>);

// As CountingRawPtr, but allowed to start uninitialized.
template <typename T>
using CountingRawPtrUninitialized =
    raw_ptr<T,
            base::RawPtrTraits::kUseCountingImplForTest |
                base::RawPtrTraits::kAllowUninitialized>;

// Ensure that the `kUseCountingImplForTest` flag selects the test impl.
static_assert(
    std::is_same_v<CountingRawPtrUninitialized<int>::Impl, RawPtrCountingImpl>);
265
// Minimal pointee for operator-> tests.
struct MyStruct {
  int x;
};

// Base1/Base2/Derived form a multiple-inheritance hierarchy; a Derived* cast
// to Base2* is offset from the original address, which the hierarchy tests
// below rely on.
struct Base1 {
  explicit Base1(int b1) : b1(b1) {}
  int b1;
};

struct Base2 {
  explicit Base2(int b2) : b2(b2) {}
  int b2;
};

struct Derived : Base1, Base2 {
  Derived(int b1, int b2, int d) : Base1(b1), Base2(b2), d(d) {}
  int d;
};
284
// Test fixture that resets the counting impl's counters before each test, so
// every CountersMatch() expectation starts from zero.
class RawPtrTest : public Test {
 protected:
  void SetUp() override {
    RawPtrCountingImpl::ClearCounters();
  }
};
291
// Use this instead of std::ignore, to prevent the instruction from getting
// optimized out by the compiler.
volatile int g_volatile_int_to_ignore;
295
// Dereferencing a null raw_ptr with unary * must crash.
TEST_F(RawPtrTest, NullStarDereference) {
  raw_ptr<int> null_ptr = nullptr;
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *null_ptr, "");
}
300
// Dereferencing a null raw_ptr with operator-> must crash.
TEST_F(RawPtrTest, NullArrowDereference) {
  raw_ptr<MyStruct> null_ptr = nullptr;
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = null_ptr->x, "");
}
305
// Extracting (implicit T* conversion of) a null pointer is allowed; it goes
// through the extraction path exactly once and must not crash.
TEST_F(RawPtrTest, NullExtractNoDereference) {
  CountingRawPtr<int> ptr = nullptr;
  // No dereference hence shouldn't crash.
  int* raw = ptr;
  std::ignore = raw;
  EXPECT_THAT((CountingRawPtrExpectations{.get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 1,
                                          .get_for_comparison_cnt = 0}),
              CountersMatch());
}
316
// Sentinel-style pointer values (here, all-ones) must survive a round trip
// through raw_ptr unchanged when extracted without dereferencing.
TEST_F(RawPtrTest, InvalidExtractNoDereference) {
  // Some code uses invalid pointer values as indicators, so those values must
  // be accepted by raw_ptr and passed through unchanged during extraction.
  int* inv_ptr = reinterpret_cast<int*>(~static_cast<uintptr_t>(0));
  CountingRawPtr<int> ptr = inv_ptr;
  int* raw = ptr;
  EXPECT_EQ(raw, inv_ptr);
  EXPECT_THAT((CountingRawPtrExpectations{.get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 1,
                                          .get_for_comparison_cnt = 0}),
              CountersMatch());
}
329
// Comparing a raw_ptr against nullptr (either operand order) must not unwrap
// the pointer, so all counters stay at zero.
TEST_F(RawPtrTest, NullCmpExplicit) {
  CountingRawPtr<int> null_ptr = nullptr;
  EXPECT_FALSE(null_ptr != nullptr);
  EXPECT_FALSE(nullptr != null_ptr);
  EXPECT_TRUE(null_ptr == nullptr);
  EXPECT_TRUE(nullptr == null_ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
344
// Testing a raw_ptr in boolean context uses operator bool directly, without
// unwrapping the pointer, so all counters stay at zero.
TEST_F(RawPtrTest, NullCmpBool) {
  CountingRawPtr<int> null_ptr = nullptr;
  EXPECT_TRUE(!null_ptr);
  EXPECT_FALSE(null_ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
357
// Sink for implicit raw_ptr -> bool conversions exercised below.
void FuncThatAcceptsBool(bool b) {}

// Returns validity via operator bool; the exact expressions matter because the
// BoolOpNotCast test asserts that none of them trigger the extraction path.
bool IsValidNoCast(CountingRawPtr<int> ptr) {
  return !!ptr;  // !! to avoid implicit cast
}
bool IsValidNoCast2(CountingRawPtr<int> ptr) {
  return ptr && true;
}
366
// All of these boolean-context uses should resolve to |operator bool| and
// therefore leave every counter at zero (see CastNotBoolOp for the contrast).
TEST_F(RawPtrTest, BoolOpNotCast) {
  CountingRawPtr<int> ptr = nullptr;
  volatile bool is_valid = !!ptr;  // !! to avoid implicit cast
  is_valid = ptr || is_valid;      // volatile, so won't be optimized
  if (ptr) {
    is_valid = true;
  }
  [[maybe_unused]] bool is_not_valid = !ptr;
  if (!ptr) {
    is_not_valid = true;
  }
  std::ignore = IsValidNoCast(ptr);
  std::ignore = IsValidNoCast2(ptr);
  FuncThatAcceptsBool(!ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
389
// Unlike IsValidNoCast, the bare `return ptr;` converts through |operator T*|
// first (extraction), which the CastNotBoolOp test below counts.
bool IsValidWithCast(CountingRawPtr<int> ptr) {
  return ptr;
}
393
// This test is mostly for documentation purposes. It demonstrates cases where
// |operator T*| is called first and then the pointer is converted to bool,
// as opposed to calling |operator bool| directly. The former may be more
// costly, so the caller has to be careful not to trigger this path.
TEST_F(RawPtrTest, CastNotBoolOp) {
  CountingRawPtr<int> ptr = nullptr;
  // Each of the three conversions below goes through |operator T*| once.
  [[maybe_unused]] bool is_valid = ptr;
  is_valid = IsValidWithCast(ptr);
  FuncThatAcceptsBool(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 3,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
410
// Unary * goes through the dereference path exactly once.
TEST_F(RawPtrTest, StarDereference) {
  int value = 42;
  CountingRawPtr<int> counted_ptr = &value;
  EXPECT_EQ(*counted_ptr, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
422
// operator-> goes through the dereference path exactly once.
TEST_F(RawPtrTest, ArrowDereference) {
  MyStruct value = {42};
  CountingRawPtr<MyStruct> counted_ptr = &value;
  EXPECT_EQ(counted_ptr->x, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
434
// `delete ptr.ExtractAsDangling()` is the sanctioned way to free a pointee;
// verify the exact wrap/unwrap sequence it performs.
TEST_F(RawPtrTest, Delete) {
  CountingRawPtr<int> ptr = new int(42);
  delete ptr.ExtractAsDangling();
  // The pointer is first internally converted to MayDangle kind, then extracted
  // using implicit cast before passing to |delete|.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 1,
                  .get_for_duplication_cnt = 1,
              }),
              CountersMatch());
}
449
// ClearAndDelete() must release the wrapped pointer, delete the pointee, and
// leave the raw_ptr null.
TEST_F(RawPtrTest, ClearAndDelete) {
  CountingRawPtr<int> ptr(new int);
  ptr.ClearAndDelete();

  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .wrapped_ptr_swap_cnt = 0,
              }),
              CountersMatch());
  EXPECT_EQ(ptr.get(), nullptr);
}
464
// Array analogue of ClearAndDelete: same counter sequence, `delete[]`
// semantics, and a null pointer afterwards.
TEST_F(RawPtrTest, ClearAndDeleteArray) {
  CountingRawPtr<int> ptr(new int[8]);
  ptr.ClearAndDeleteArray();

  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .wrapped_ptr_swap_cnt = 0,
              }),
              CountersMatch());
  EXPECT_EQ(ptr.get(), nullptr);
}
479
// ExtractAsDangling() moves the value into a kMayDangle raw_ptr (one release
// on the source, one wrap-for-duplication on the destination) and nulls the
// source.
TEST_F(RawPtrTest, ExtractAsDangling) {
  CountingRawPtr<int> ptr(new int);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  EXPECT_TRUE(ptr.get());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 1,
                  .get_for_duplication_cnt = 1,
              }),
              CountersMatch());

  EXPECT_FALSE(ptr.get());
  EXPECT_TRUE(dangling.get());

  dangling.ClearAndDelete();
}
512
// When the source is already kMayDangle, ExtractAsDangling() is a plain move:
// no duplication wrap/unwrap, only a release on the source.
TEST_F(RawPtrTest, ExtractAsDanglingFromDangling) {
  CountingRawPtrMayDangle<int> ptr(new int);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  // wrap_raw_ptr_cnt remains `1` because, as `ptr` is already a dangling
  // pointer, we are only moving `ptr` to `dangling` here to avoid extra cost.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  dangling.ClearAndDelete();
}
542
// raw_ptr<const volatile void> supports extraction via static_cast; the cast
// necessarily uses the extraction path (one count) rather than dereference.
TEST_F(RawPtrTest, ConstVolatileVoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<const volatile void> ptr = foo;
  EXPECT_EQ(*static_cast<const volatile int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
556
// Same as ConstVolatileVoidPtr but for plain raw_ptr<void>.
TEST_F(RawPtrTest, VoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<void> ptr = foo;
  EXPECT_EQ(*static_cast<int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
570
// operator== uses the comparison path, never dereference/extraction.  Each
// raw_ptr operand of a comparison counts once: five raw_ptr/raw_ptr compares
// (2 each) plus two raw_ptr/T* compares (1 each) = 12.
TEST_F(RawPtrTest, OperatorEQ) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_TRUE(ptr1 == ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_TRUE(ptr1 == ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_TRUE(&foo == ptr3);
  EXPECT_TRUE(ptr3 == &foo);
  EXPECT_FALSE(ptr1 == ptr3);

  ptr1 = &foo;
  EXPECT_TRUE(ptr1 == ptr3);
  EXPECT_TRUE(ptr3 == ptr1);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}
595
// Mirror of OperatorEQ for operator!=: same operand accounting, 12 comparison
// counts in total.
TEST_F(RawPtrTest, OperatorNE) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_FALSE(ptr1 != ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_FALSE(ptr1 != ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_FALSE(&foo != ptr3);
  EXPECT_FALSE(ptr3 != &foo);
  EXPECT_TRUE(ptr1 != ptr3);

  ptr1 = &foo;
  EXPECT_FALSE(ptr1 != ptr3);
  EXPECT_FALSE(ptr3 != ptr1);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}
620
// Comparisons between raw_ptrs / raw pointers of different (cv-qualified,
// void) types must resolve to operator== overloads, not to an implicit T*
// cast followed by a built-in compare.
TEST_F(RawPtrTest, OperatorEQCast) {
  int foo = 42;
  const int* raw_int_ptr = &foo;
  volatile void* raw_void_ptr = &foo;
  CountingRawPtr<volatile int> checked_int_ptr = &foo;
  CountingRawPtr<const void> checked_void_ptr = &foo;
  EXPECT_TRUE(checked_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_int_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).  16 = four raw_ptr/raw_ptr compares (2 each) plus
  // eight raw_ptr/raw compares (1 each).
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountersMatch());
}
648
// Comparing across a multiple-inheritance hierarchy: operator== must apply the
// same base-adjustment that built-in pointer comparison does.
TEST_F(RawPtrTest, OperatorEQCastHierarchy) {
  Derived derived_val(42, 84, 1024);
  Derived* raw_derived_ptr = &derived_val;
  const Base1* raw_base1_ptr = &derived_val;
  volatile Base2* raw_base2_ptr = &derived_val;
  // Double check the basic understanding of pointers: Even though the numeric
  // value (i.e. the address) isn't equal, the pointers are still equal. That's
  // because from derived to base adjusts the address.
  // raw_ptr must behave the same, which is checked below.
  ASSERT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  ASSERT_TRUE(raw_base2_ptr == raw_derived_ptr);

  CountingRawPtr<const volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<volatile Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const Base2> checked_base2_ptr = &derived_val;
  EXPECT_TRUE(checked_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base1_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_base1_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base1_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base1_ptr == checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uintptr_t values shouldn't
  // match, ensure that the internal pointer manipulation correctly offsets when
  // casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_TRUE(checked_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base2_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_base2_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base2_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base2_ptr == checked_derived_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).
  // The 4 extractions come from .get() checks, that compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountersMatch());
}
700
// Mirror of OperatorEQCast for operator!=.
TEST_F(RawPtrTest, OperatorNECast) {
  int foo = 42;
  volatile int* raw_int_ptr = &foo;
  const void* raw_void_ptr = &foo;
  CountingRawPtr<const int> checked_int_ptr = &foo;
  CountingRawPtr<volatile void> checked_void_ptr = &foo;
  EXPECT_FALSE(checked_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_int_ptr);
  // Make sure that all cases are handled by operator!= (faster) and none by the
  // cast operator (slower).
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountersMatch());
}
728
// Mirror of OperatorEQCastHierarchy for operator!=.
TEST_F(RawPtrTest, OperatorNECastHierarchy) {
  Derived derived_val(42, 84, 1024);
  const Derived* raw_derived_ptr = &derived_val;
  volatile Base1* raw_base1_ptr = &derived_val;
  const Base2* raw_base2_ptr = &derived_val;
  CountingRawPtr<volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<const Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const volatile Base2> checked_base2_ptr = &derived_val;
  EXPECT_FALSE(checked_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base1_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_base1_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base1_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base1_ptr != checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uintptr_t values shouldn't
  // match, ensure that the internal pointer manipulation correctly offsets when
  // casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_FALSE(checked_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base2_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_base2_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base2_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base2_ptr != checked_derived_ptr);
  // Make sure that all cases are handled by operator!= (faster) and none by the
  // cast operator (slower).
  // The 4 extractions come from .get() checks, that compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountersMatch());
}
772
// Exhaustively exercises conversions between raw_ptr and raw pointers across
// the class hierarchy, cv-qualifiers, and void*: implicit upcasts, explicit
// static_cast downcasts, and const/volatile additions.  Each conversion is
// checked by reading the member values set by Derived's constructor.
TEST_F(RawPtrTest, Cast) {
  Derived derived_val(42, 84, 1024);
  raw_ptr<Derived> checked_derived_ptr = &derived_val;
  // Implicit upcast to both bases (the Base2 case requires an address offset).
  Base1* raw_base1_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base1_ptr->b1, 42);
  Base2* raw_base2_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base2_ptr->b2, 84);

  // Explicit downcast from each raw base back to Derived*.
  Derived* raw_derived_ptr = static_cast<Derived*>(raw_base1_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);
  raw_derived_ptr = static_cast<Derived*>(raw_base2_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);

  // Upcast while wrapping in raw_ptr.
  raw_ptr<Base1> checked_base1_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base1_ptr->b1, 42);
  raw_ptr<Base2> checked_base2_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base2_ptr->b2, 84);

  // Downcast from a raw_ptr base (extracts, casts, re-wraps).
  raw_ptr<Derived> checked_derived_ptr2 =
      static_cast<Derived*>(checked_base1_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);
  checked_derived_ptr2 = static_cast<Derived*>(checked_base2_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);

  // Adding const in both directions (raw_ptr -> raw and raw -> raw_ptr).
  const Derived* raw_const_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_const_derived_ptr->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr = raw_const_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr->d, 1024);

  const Derived* raw_const_derived_ptr2 = checked_const_derived_ptr;
  EXPECT_EQ(raw_const_derived_ptr2->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr2->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr2 = raw_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr3 = checked_derived_ptr2;
  EXPECT_EQ(checked_const_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr3->d, 1024);

  // Adding volatile.
  volatile Derived* raw_volatile_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(raw_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(raw_volatile_derived_ptr->d, 1024);

  raw_ptr<volatile Derived> checked_volatile_derived_ptr =
      raw_volatile_derived_ptr;
  EXPECT_EQ(checked_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(checked_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(checked_volatile_derived_ptr->d, 1024);

  // Round trip through void* (raw and wrapped).
  void* raw_void_ptr = checked_derived_ptr;
  raw_ptr<void> checked_void_ptr = raw_derived_ptr;
  raw_ptr<Derived> checked_derived_ptr3 = static_cast<Derived*>(raw_void_ptr);
  raw_ptr<Derived> checked_derived_ptr4 =
      static_cast<Derived*>(checked_void_ptr);
  EXPECT_EQ(checked_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_derived_ptr3->d, 1024);
  EXPECT_EQ(checked_derived_ptr4->b1, 42);
  EXPECT_EQ(checked_derived_ptr4->b2, 84);
  EXPECT_EQ(checked_derived_ptr4->d, 1024);
}
853
TEST_F(RawPtrTest, UpcastConvertible) {
  {
    // Copy-based upcasts: raw_ptr<Derived> must implicitly convert to
    // raw_ptr<Base1> and raw_ptr<Base2> via construction and assignment,
    // and compare equal to the pointer it was converted from.
    Derived derived(42, 84, 1024);
    raw_ptr<Derived> derived_ptr = &derived;

    raw_ptr<Base1> base1_ptr(derived_ptr);
    EXPECT_EQ(base1_ptr->b1, 42);
    raw_ptr<Base2> base2_ptr(derived_ptr);
    EXPECT_EQ(base2_ptr->b2, 84);

    base1_ptr = derived_ptr;
    EXPECT_EQ(base1_ptr->b1, 42);
    base2_ptr = derived_ptr;
    EXPECT_EQ(base2_ptr->b2, 84);

    EXPECT_EQ(base1_ptr, derived_ptr);
    EXPECT_EQ(base2_ptr, derived_ptr);
  }

  {
    // Move-based upcasts: the same conversions must be available from
    // rvalue raw_ptr<Derived> instances.
    Derived derived(42, 84, 1024);
    raw_ptr<Derived> derived_ptr_a = &derived;
    raw_ptr<Derived> derived_ptr_b = &derived;
    raw_ptr<Derived> derived_ptr_c = &derived;
    raw_ptr<Derived> derived_ptr_d = &derived;

    raw_ptr<Base1> base1_ptr(std::move(derived_ptr_a));
    EXPECT_EQ(base1_ptr->b1, 42);
    raw_ptr<Base2> base2_ptr(std::move(derived_ptr_b));
    EXPECT_EQ(base2_ptr->b2, 84);

    base1_ptr = std::move(derived_ptr_c);
    EXPECT_EQ(base1_ptr->b1, 42);
    base2_ptr = std::move(derived_ptr_d);
    EXPECT_EQ(base2_ptr->b2, 84);
  }
}
891
TEST_F(RawPtrTest, UpcastNotConvertible) {
  // Conversions that are ill-formed for plain pointers must be equally
  // ill-formed for raw_ptr: private inheritance, unrelated class types,
  // void round-trips, and differently-sized integral pointees.
  class PrivateBase {};
  class PrivatelyDerived : private PrivateBase {};
  class Bystander {};
  EXPECT_FALSE(
      (std::is_convertible_v<raw_ptr<PrivatelyDerived>, raw_ptr<PrivateBase>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<Bystander>, raw_ptr<PrivateBase>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<Bystander>, raw_ptr<void>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<void>, raw_ptr<Bystander>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<int64_t>, raw_ptr<int32_t>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<int16_t>, raw_ptr<int32_t>>));
}
903
// Upcasting conversions (copy and move, construction and assignment) must
// never hit the expensive GetFor* paths; all three counters stay at zero.
TEST_F(RawPtrTest, UpcastPerformance) {
  {
    // Copy-based conversions.
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(checked_derived_ptr);
    CountingRawPtr<Base2> checked_base2_ptr(checked_derived_ptr);
    checked_base1_ptr = checked_derived_ptr;
    checked_base2_ptr = checked_derived_ptr;
  }

  {
    // Move-based conversions. NOTE(review): checked_derived_ptr is reused
    // after std::move — presumably intentional, since only the counter
    // totals (not the moved-from value) matter to this test; confirm.
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(std::move(checked_derived_ptr));
    CountingRawPtr<Base2> checked_base2_ptr(std::move(checked_derived_ptr));
    checked_base1_ptr = std::move(checked_derived_ptr);
    checked_base2_ptr = std::move(checked_derived_ptr);
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
930
// ADL swap (`using std::swap; swap(a, b)`) must select raw_ptr's custom swap
// overload, which the counting impl records exactly once.
TEST_F(RawPtrTest, CustomSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  // Recommended use pattern.
  using std::swap;
  swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
  // Exactly one hit on the custom overload.
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 1);
}
942
// A fully qualified std::swap call bypasses the custom overload and uses the
// generic move-based swap, so the custom-swap counter must stay at zero.
TEST_F(RawPtrTest, StdSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  std::swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
  // The swap still worked (values exchanged above) without the custom hook.
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 0);
}
952
// `*ptr++` must cost exactly one dereference per iteration and must not go
// through the extraction or comparison paths.
TEST_F(RawPtrTest, PostIncrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; ++i) {
    ASSERT_EQ(*ptr++, 42 + i);
  }
  // 4 loop iterations => 4 dereferences.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
966
// `*ptr--` must cost exactly one dereference per use, with no extraction or
// comparison traffic.
TEST_F(RawPtrTest, PostDecrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  // Avoid decrementing out of the slot holding the vector's backing store.
  for (int i = 3; i > 0; --i) {
    ASSERT_EQ(*ptr--, 42 + i);
  }
  ASSERT_EQ(*ptr, 42);
  // 3 loop dereferences + 1 final check => 4.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
982
// `++ptr` plus `*ptr` must cost exactly one dereference per iteration.
TEST_F(RawPtrTest, PreIncrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; ++i, ++ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
996
// `--ptr` plus `*ptr` must cost exactly one dereference per use.
TEST_F(RawPtrTest, PreDecrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  // Avoid decrementing out of the slot holding the vector's backing store.
  for (int i = 3; i > 0; --i, --ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  ASSERT_EQ(*ptr, 42);
  // 3 loop dereferences + 1 final check => 4.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1012
// `ptr += n` must not itself dereference; only the explicit `*ptr` reads
// count (2 iterations => 2 dereferences).
TEST_F(RawPtrTest, PlusEqualOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; i += 2, ptr += 2) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1026
// Exercises operator+= / operator-= with several integral operand types
// (int literals, ptrdiff_t, size_t) to confirm all overloads compile and
// advance the pointer correctly.
TEST_F(RawPtrTest, PlusEqualOperatorTypes) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  ASSERT_EQ(*ptr, 42);
  ptr += 2;  // Positive literal.
  ASSERT_EQ(*ptr, 44);
  ptr -= 2;  // Negative displacement via -=.
  ASSERT_EQ(*ptr, 42);
  ptr += ptrdiff_t{1};  // ptrdiff_t.
  ASSERT_EQ(*ptr, 43);
  ptr += size_t{2};  // size_t.
  ASSERT_EQ(*ptr, 45);
}
1040
// `ptr -= n` must not itself dereference; the two explicit `*ptr` reads are
// the only dereferences.
TEST_F(RawPtrTest, MinusEqualOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  ASSERT_EQ(*ptr, 45);
  ptr -= 2;
  ASSERT_EQ(*ptr, 43);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1054
// Exercises operator-= with several integral operand types, including a
// negative literal (which moves the pointer forward).
TEST_F(RawPtrTest, MinusEqualOperatorTypes) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = &foo[3];
  ASSERT_EQ(*ptr, 45);
  ptr -= 2;  // Positive literal.
  ASSERT_EQ(*ptr, 43);
  ptr -= -2;  // Negative literal.
  ASSERT_EQ(*ptr, 45);
  ptr -= ptrdiff_t{2};  // ptrdiff_t.
  ASSERT_EQ(*ptr, 43);
  ptr -= size_t{1};  // size_t.
  ASSERT_EQ(*ptr, 42);
}
1068
// Binary `ptr + i` must yield a pointer whose dereference is the only
// counted operation (4 reads => 4 dereferences, nothing else).
TEST_F(RawPtrTest, PlusOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = foo;
  for (int i = 0; i < 4; ++i) {
    ASSERT_EQ(*(ptr + i), 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1082
// Binary `ptr - i`, starting from the one-past-the-end address, must yield
// only dereference traffic (4 reads => 4 dereferences).
TEST_F(RawPtrTest, MinusOperator) {
  int foo[] = {42, 43, 44, 45};
  // &foo[4] is the valid one-past-the-end address.
  CountingRawPtr<int> ptr = &foo[4];
  for (int i = 1; i <= 4; ++i) {
    ASSERT_EQ(*(ptr - i), 46 - i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1096
// Pointer difference (raw_ptr-raw_ptr, raw_ptr-T*, T*-raw_ptr) must not go
// through any of the counted GetFor* paths.
TEST_F(RawPtrTest, MinusDeltaOperator) {
  int foo[] = {42, 43, 44, 45};
  // The last entry is the one-past-the-end pointer, which is valid to form
  // and subtract (but not to dereference).
  CountingRawPtr<int> ptrs[] = {&foo[0], &foo[1], &foo[2], &foo[3], &foo[4]};
  for (int i = 0; i <= 4; ++i) {
    for (int j = 0; j <= 4; ++j) {
      ASSERT_EQ(ptrs[i] - ptrs[j], i - j);
      ASSERT_EQ(ptrs[i] - &foo[j], i - j);
      ASSERT_EQ(&foo[i] - ptrs[j], i - j);
    }
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1114
// Walking a C string with ++ptr must count one dereference per character
// ("Hello" => 5) and nothing else.
TEST_F(RawPtrTest, AdvanceString) {
  const char kChars[] = "Hello";
  std::string str = kChars;
  CountingRawPtr<const char> ptr = str.c_str();
  for (size_t i = 0; i < str.size(); ++i, ++ptr) {
    ASSERT_EQ(*ptr, kChars[i]);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 5,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1129
// Assigning nullptr must take a fast path: no wrapping and no GetFor*
// traffic at all.
TEST_F(RawPtrTest, AssignmentFromNullptr) {
  CountingRawPtr<int> wrapped_ptr;
  wrapped_ptr = nullptr;
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1141
// Helper for the FunctionParameters_* tests: receives a raw_ptr by value and
// verifies it still wraps the expected raw address and value.
void FunctionWithRawPtrParameter(raw_ptr<int> actual_ptr, int* expected_ptr) {
  EXPECT_EQ(actual_ptr.get(), expected_ptr);
  EXPECT_EQ(*actual_ptr, *expected_ptr);
}
1146
1147 // This test checks that raw_ptr<T> can be passed by value into function
1148 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
TEST_F(RawPtrTest, FunctionParameters_ImplicitlyMovedTemporary) {
  int value = 123;
  // The prvalue raw_ptr is materialized directly into the parameter slot.
  FunctionWithRawPtrParameter(raw_ptr<int>(&value), &value);
}
1155
1156 // This test checks that raw_ptr<T> can be passed by value into function
1157 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
TEST_F(RawPtrTest, FunctionParameters_ExplicitlyMovedLValue) {
  int value = 123;
  raw_ptr<int> local_ptr(&value);
  // Move the lvalue into the by-value parameter.
  FunctionWithRawPtrParameter(std::move(local_ptr), &value);
}
1163
1164 // This test checks that raw_ptr<T> can be passed by value into function
1165 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
TEST_F(RawPtrTest, FunctionParameters_Copy) {
  int value = 123;
  raw_ptr<int> local_ptr(&value);
  // `local_ptr` is copied into the by-value parameter.
  FunctionWithRawPtrParameter(local_ptr, &value);
}
1172
// std::set insertions and lookups must route through the comparison path
// (GetForComparison / wrapped_ptr_less), never dereference or extraction.
TEST_F(RawPtrTest, SetLookupUsesGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr(&x);
  std::set<CountingRawPtr<int>> set;

  RawPtrCountingImpl::ClearCounters();
  set.emplace(&x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  // Nothing to compare to yet.
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.emplace(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 items to compare to => 4 calls.
                  .get_for_comparison_cnt = 4,
                  // 1 element to compare to => 2 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.count(&x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 2 extractions. Less than before, because
                  // this time a raw pointer is one side of the comparison.
                  .get_for_comparison_cnt = 2,
                  // 2 items to compare to => 4 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.count(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 4 extractions.
                  .get_for_comparison_cnt = 4,
                  // 2 items to compare to => 4 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());
}
1230
// Relational operators must use GetForComparison — both when comparing two
// raw_ptrs (both sides counted) and when a plain pointer is on either side
// (only the raw_ptr side counted).
TEST_F(RawPtrTest, ComparisonOperatorUsesGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr(&x);

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < ptr);
  EXPECT_FALSE(ptr > ptr);
  EXPECT_TRUE(ptr <= ptr);
  EXPECT_TRUE(ptr >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 4 operators x 2 raw_ptr operands => 8.
                  .get_for_comparison_cnt = 8,
                  // < is used directly, not std::less().
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < &x);
  EXPECT_FALSE(ptr > &x);
  EXPECT_TRUE(ptr <= &x);
  EXPECT_TRUE(ptr >= &x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // Only the raw_ptr side is counted => 4.
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(&x < ptr);
  EXPECT_FALSE(&x > ptr);
  EXPECT_TRUE(&x <= ptr);
  EXPECT_TRUE(&x >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());
}
1278
1279 // Two `raw_ptr`s with different Traits should still hit `GetForComparison()`
1280 // (as opposed to `GetForExtraction()`) in their comparison operators. We use
1281 // `CountingRawPtr` and `CountingRawPtrMayDangle` to contrast two different
1282 // Traits.
TEST_F(RawPtrTest, OperatorsUseGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;
  CountingRawPtrMayDangle<int> ptr2 = &x;

  RawPtrCountingImpl::ClearCounters();

  // ==/!= across two different Traits: both sides counted => 4.
  EXPECT_TRUE(ptr1 == ptr2);
  EXPECT_FALSE(ptr1 != ptr2);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
              }),
              CountersMatch());

  // Counters are NOT cleared here, so the 12 below is cumulative:
  // 4 from above + 4 relational operators x 2 sides.
  EXPECT_FALSE(ptr1 < ptr2);
  EXPECT_FALSE(ptr1 > ptr2);
  EXPECT_TRUE(ptr1 <= ptr2);
  EXPECT_TRUE(ptr1 >= ptr2);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}
1308
1309 // This test checks how the std library handles collections like
1310 // std::vector<raw_ptr<T>>.
TEST_F(RawPtrTest,TrivialRelocability)1311 TEST_F(RawPtrTest, TrivialRelocability) {
1312 std::vector<CountingRawPtr<int>> vector;
1313 int x = 123;
1314
1315 // See how many times raw_ptr's destructor is called when std::vector
1316 // needs to increase its capacity and reallocate the internal vector
1317 // storage (moving the raw_ptr elements).
1318 RawPtrCountingImpl::ClearCounters();
1319 size_t number_of_capacity_changes = 0;
1320 do {
1321 size_t previous_capacity = vector.capacity();
1322 while (vector.capacity() == previous_capacity) {
1323 vector.emplace_back(&x);
1324 }
1325 number_of_capacity_changes++;
1326 } while (number_of_capacity_changes < 10);
1327 EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
1328 // Basic smoke test that raw_ptr elements in a vector work okay.
1329 for (const auto& elem : vector) {
1330 EXPECT_EQ(elem.get(), &x);
1331 EXPECT_EQ(*elem, x);
1332 }
1333
1334 // Verification that release_wrapped_ptr_cnt does capture how many times the
1335 // destructors are called (e.g. that it is not always zero).
1336 RawPtrCountingImpl::ClearCounters();
1337 size_t number_of_cleared_elements = vector.size();
1338 vector.clear();
1339 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) || \
1340 BUILDFLAG(USE_ASAN_UNOWNED_PTR) || BUILDFLAG(USE_HOOKABLE_RAW_PTR) || \
1341 BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
1342 EXPECT_EQ((int)number_of_cleared_elements,
1343 RawPtrCountingImpl::release_wrapped_ptr_cnt);
1344 #else
1345 // TODO(lukasza): NoOpImpl has a default destructor that, unless zeroing is
1346 // requested, doesn't go through RawPtrCountingImpl::ReleaseWrappedPtr. So we
1347 // can't really depend on `g_release_wrapped_ptr_cnt`. This #else branch
1348 // should be deleted once USE_BACKUP_REF_PTR is removed (e.g. once
1349 // BackupRefPtr ships to the Stable channel).
1350 EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
1351 std::ignore = number_of_cleared_elements;
1352 #endif // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) ||
1353 // BUILDFLAG(USE_ASAN_UNOWNED_PTR) ||
1354 // BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
1355 }
1356
// Minimal polymorphic base for the derived-struct comparison tests below.
struct BaseStruct {
  explicit BaseStruct(int a_in) : a(a_in) {}
  virtual ~BaseStruct() = default;

  int a;
};
1363
1364 struct DerivedType1 : public BaseStruct {
DerivedType1__anon154ccca30411::DerivedType11365 explicit DerivedType1(int a, int b) : BaseStruct(a), b(b) {}
1366 int b;
1367 };
1368
1369 struct DerivedType2 : public BaseStruct {
DerivedType2__anon154ccca30411::DerivedType21370 explicit DerivedType2(int a, int c) : BaseStruct(a), c(c) {}
1371 int c;
1372 };
1373
TEST_F(RawPtrTest, DerivedStructsComparison) {
  DerivedType1 instance1(42, 84);
  raw_ptr<DerivedType1> derived1_ptr = &instance1;
  DerivedType2 instance2(21, 10);
  raw_ptr<DerivedType2> derived2_ptr = &instance2;

  // Comparing a |DerivedType2*| against a |DerivedType1*| viewed through a
  // |BaseStruct*| must not trigger CFI errors.
  EXPECT_NE(derived1_ptr, static_cast<BaseStruct*>(derived2_ptr.get()));
  EXPECT_NE(static_cast<BaseStruct*>(derived1_ptr.get()), derived2_ptr);
}
1387
// Base class supplying a const member function; the PointerToMemberFunction
// test forms pointers-to-member-function from it.
class PmfTestBase {
 public:
  // Returns a sentinel distinguishing the base overload from the derived one.
  int MemFunc(char, double) const { return 11; }
};
1392
// Derived class that both re-exports the base overload (via using) and adds
// a non-const overload with a different sentinel return value.
class PmfTestDerived : public PmfTestBase {
 public:
  using PmfTestBase::MemFunc;
  // Non-const overload; sentinel 22 distinguishes it from the base's 11.
  int MemFunc(float, double) { return 22; }
};
1398
// `ptr->*pmf` must work through raw_ptr for base and derived
// pointers-to-member-function, including const raw_ptr and raw_ptr-to-const.
TEST_F(RawPtrTest, PointerToMemberFunction) {
  PmfTestDerived object;
  int (PmfTestBase::*pmf_base_base)(char, double) const = &PmfTestBase::MemFunc;
  int (PmfTestDerived::*pmf_derived_base)(char, double) const =
      &PmfTestDerived::MemFunc;
  int (PmfTestDerived::*pmf_derived_derived)(float, double) =
      &PmfTestDerived::MemFunc;

  // Test for `derived_ptr`
  CountingRawPtr<PmfTestDerived> derived_ptr = &object;

  EXPECT_EQ((derived_ptr->*pmf_base_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr->*pmf_derived_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr->*pmf_derived_derived)(0, 0), 22);

  // Test for `derived_ptr_const` (const wrapper, non-const pointee).
  const CountingRawPtr<PmfTestDerived> derived_ptr_const = &object;

  EXPECT_EQ((derived_ptr_const->*pmf_base_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr_const->*pmf_derived_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr_const->*pmf_derived_derived)(0, 0), 22);

  // Test for `const_derived_ptr` (non-const wrapper, const pointee).
  CountingRawPtr<const PmfTestDerived> const_derived_ptr = &object;

  EXPECT_EQ((const_derived_ptr->*pmf_base_base)(0, 0), 11);
  EXPECT_EQ((const_derived_ptr->*pmf_derived_base)(0, 0), 11);
  // const_derived_ptr->*pmf_derived_derived is not a const member function,
  // so it's not possible to test it.
}
1429
TEST_F(RawPtrTest, WorksWithOptional) {
  // raw_ptr must be usable as the payload of std::optional: empty, holding
  // nullptr, and holding a real address are all distinct states.
  int value = 0;
  std::optional<raw_ptr<int>> opt;
  EXPECT_FALSE(opt.has_value());

  opt = nullptr;
  ASSERT_TRUE(opt.has_value());
  EXPECT_EQ(nullptr, opt.value());

  opt = &value;
  ASSERT_TRUE(opt.has_value());
  EXPECT_EQ(&value, opt.value());
}
1443
TEST_F(RawPtrTest, WorksWithVariant) {
  // raw_ptr must be usable as an absl::variant alternative; assigning an
  // int keeps index 0, assigning nullptr/addresses switches to index 1.
  int value = 100;
  absl::variant<int, raw_ptr<int>> var;
  ASSERT_EQ(0u, var.index());
  EXPECT_EQ(0, absl::get<int>(var));

  var = value;
  ASSERT_EQ(0u, var.index());
  EXPECT_EQ(100, absl::get<int>(var));

  var = nullptr;
  ASSERT_EQ(1u, var.index());
  EXPECT_EQ(nullptr, absl::get<raw_ptr<int>>(var));

  var = &value;
  ASSERT_EQ(1u, var.index());
  EXPECT_EQ(&value, absl::get<raw_ptr<int>>(var));
}
1462
// Converting between raw_ptr kinds (different Traits) must go through the
// duplication path (WrapRawPtrForDuplication / GetForDuplication), not the
// regular wrap/extraction paths.
TEST_F(RawPtrTest, CrossKindConversion) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;

  RawPtrCountingImpl::ClearCounters();

  CountingRawPtrMayDangle<int> ptr2(ptr1);
  CountingRawPtrMayDangle<int> ptr3(std::move(ptr1));  // Falls back to copy.

  EXPECT_THAT((CountingRawPtrExpectations{.wrap_raw_ptr_cnt = 0,
                                          .get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 0,
                                          .wrap_raw_ptr_for_dup_cnt = 2,
                                          .get_for_duplication_cnt = 2}),
              CountersMatch());
}
1479
// Same as CrossKindConversion, but for assignment instead of construction:
// cross-kind assignment must also use the duplication path.
TEST_F(RawPtrTest, CrossKindAssignment) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;

  RawPtrCountingImpl::ClearCounters();

  CountingRawPtrMayDangle<int> ptr2;
  CountingRawPtrMayDangle<int> ptr3;
  ptr2 = ptr1;
  ptr3 = std::move(ptr1);  // Falls back to copy.

  EXPECT_THAT((CountingRawPtrExpectations{.wrap_raw_ptr_cnt = 0,
                                          .get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 0,
                                          .wrap_raw_ptr_for_dup_cnt = 2,
                                          .get_for_duplication_cnt = 2}),
              CountersMatch());
}
1498
1499 // Without the explicitly customized `raw_ptr::to_address()`,
1500 // `std::to_address()` will use the dereference operator. This is not
1501 // what we want; this test enforces extraction semantics for
1502 // `to_address()`.
TEST_F(RawPtrTest, ToAddressDoesNotDereference) {
  CountingRawPtr<int> ptr = nullptr;
  int* raw = base::to_address(ptr);
  std::ignore = raw;
  // Exactly one extraction, zero dereferences: proves to_address() hit the
  // customized extraction path rather than operator*.
  EXPECT_THAT((CountingRawPtrExpectations{.get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 1,
                                          .get_for_comparison_cnt = 0,
                                          .get_for_duplication_cnt = 0}),
              CountersMatch());
}
1513
TEST_F(RawPtrTest, ToAddressGivesBackRawAddress) {
  int* plain = nullptr;
  raw_ptr<int> wrapped = plain;
  // to_address() on the wrapper must yield the same address as on the plain
  // pointer it was constructed from.
  EXPECT_EQ(base::to_address(plain), base::to_address(wrapped));
}
1519
// Stores |src| through the pointer-to-pointer out-parameter.
void InOutParamFuncWithPointer(int* src, int** dest) {
  *dest = src;
}
1523
// AsEphemeralRawAddr() must expose a temporary int** usable as an
// out-parameter, distinct from raw_ptr's own storage, whose writes are
// propagated back into the raw_ptr.
TEST_F(RawPtrTest, EphemeralRawAddrPointerPointer) {
  int v1 = 123;
  int v2 = 456;
  raw_ptr<int> ptr = &v1;
  // Pointer pointer should point to a pointer other than one inside raw_ptr.
  EXPECT_NE(&ptr.AsEphemeralRawAddr(),
            reinterpret_cast<int**>(std::addressof(ptr)));
  // But inner pointer should point to the same address.
  EXPECT_EQ(*&ptr.AsEphemeralRawAddr(), &v1);

  // Inner pointer can be rewritten via the pointer pointer.
  *&ptr.AsEphemeralRawAddr() = &v2;
  EXPECT_EQ(ptr.get(), &v2);
  InOutParamFuncWithPointer(&v1, &ptr.AsEphemeralRawAddr());
  EXPECT_EQ(ptr.get(), &v1);
}
1540
// Stores |src| through the reference-to-pointer out-parameter.
void InOutParamFuncWithReference(int* src, int*& dest) {
  dest = src;
}
1544
// Same as EphemeralRawAddrPointerPointer, but exercising the int*& (reference)
// conversion of the ephemeral proxy instead of int**.
TEST_F(RawPtrTest, EphemeralRawAddrPointerReference) {
  int v1 = 123;
  int v2 = 456;
  raw_ptr<int> ptr = &v1;
  // Pointer reference should refer to a pointer other than one inside raw_ptr.
  EXPECT_NE(&static_cast<int*&>(ptr.AsEphemeralRawAddr()),
            reinterpret_cast<int**>(std::addressof(ptr)));
  // But inner pointer should point to the same address.
  EXPECT_EQ(static_cast<int*&>(ptr.AsEphemeralRawAddr()), &v1);

  // Inner pointer can be rewritten via the pointer pointer.
  static_cast<int*&>(ptr.AsEphemeralRawAddr()) = &v2;
  EXPECT_EQ(ptr.get(), &v2);
  InOutParamFuncWithReference(&v1, ptr.AsEphemeralRawAddr());
  EXPECT_EQ(ptr.get(), &v1);
}
1561
1562 // InstanceTracer has additional fields, so just skip this test when instance
1563 // tracing is enabled.
1564 #if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_INSTANCE_TRACER)
1565 #if defined(COMPILER_GCC) && !defined(__clang__)
1566 // In GCC this test will optimize the return value of the constructor, so
1567 // assert fails. Disable optimizations to verify uninitialized attribute works
1568 // as expected.
1569 #pragma GCC push_options
1570 #pragma GCC optimize("O0")
1571 #endif
// Verifies the uninitialized flavor really performs no initialization:
// placement-new over a known bit pattern must leave the pattern intact.
TEST_F(RawPtrTest, AllowUninitialized) {
  constexpr uintptr_t kPattern = 0x12345678;
  uintptr_t storage = kPattern;
  // Placement new over stored pattern must not change it.
  new (&storage) CountingRawPtrUninitialized<int>;
  EXPECT_EQ(storage, kPattern);
}
1579 #if defined(COMPILER_GCC) && !defined(__clang__)
1580 #pragma GCC pop_options
1581 #endif
1582 #endif // !BUILDFLAG(ENABLE_BACKUP_REF_PTR_INSTANCE_TRACER)
1583
1584 } // namespace
1585
1586 namespace base::internal {
1587
1588 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
1589 !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
1590
// Out-of-memory handler installed via PartitionAllocGlobalInit in
// BackupRefPtrTest::SetUp(); terminates the test process on allocation
// failure.
void HandleOOM(size_t unused_size) {
  LOG(FATAL) << "Out of memory";
}
1594
// Fixture running raw_ptr (BackupRefPtr) against a real PartitionAlloc
// partition with BRP enabled, and MTE enabled when the hardware supports it.
class BackupRefPtrTest : public testing::Test {
 protected:
  void SetUp() override {
    // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
    // new/delete once PartitionAlloc Everywhere is fully enabled.
    partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  }

  // Returns a request size whose allocation (including BRP extras) exactly
  // fills a 512-byte slot.
  size_t GetRequestSizeThatFills512BSlot() {
    // This requires some internal PartitionAlloc knowledge, but for the test to
    // work well the allocation + extras have to fill out the entire slot.
    // That's because PartitionAlloc doesn't know exact allocation size and
    // bases the guards on the slot size.
    //
    // A power of two is a safe choice for a slot size, then adjust it for
    // extras.
    size_t slot_size = 512;
    size_t requested_size =
        allocator_.root()->AdjustSizeForExtrasSubtract(slot_size);
    // Verify that we're indeed filling up the slot.
    // (ASSERT_EQ is more appropriate here, because it verifies test setup, but
    // it doesn't compile.)
    EXPECT_EQ(
        requested_size,
        allocator_.root()->AllocationCapacityFromRequestedSize(requested_size));
    return requested_size;
  }

  // Dedicated partition with BackupRefPtr support; MTE is switched on only
  // when the current CPU actually has it.
  partition_alloc::PartitionAllocator allocator_ =
      partition_alloc::PartitionAllocator([]() {
        partition_alloc::PartitionOptions opts;
        opts.backup_ref_ptr = partition_alloc::PartitionOptions::kEnabled;
        opts.memory_tagging = {
            .enabled = base::CPU::GetInstanceNoAllocation().has_mte()
                           ? partition_alloc::PartitionOptions::kEnabled
                           : partition_alloc::PartitionOptions::kDisabled};
        return opts;
      }());
};
1634
// End-to-end BackupRefPtr behavior: while a raw_ptr still references a freed
// allocation, the slot must stay quarantined (or the access must trap);
// releasing the last reference makes the slot reusable.
TEST_F(BackupRefPtrTest, Basic) {
  base::CPU cpu;

  int* raw_ptr1 =
      reinterpret_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
  // Use the actual raw_ptr implementation, not a test substitute, to
  // exercise real PartitionAlloc paths.
  raw_ptr<int, DisableDanglingPtrDetection> wrapped_ptr1 = raw_ptr1;

  *raw_ptr1 = 42;
  EXPECT_EQ(*raw_ptr1, *wrapped_ptr1);

  allocator_.root()->Free(raw_ptr1);
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  // In debug builds, the use-after-free should be caught immediately.
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
#else   // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  if (cpu.has_mte()) {
    // If the hardware supports MTE, the use-after-free should also be caught.
    EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
  } else {
    // The allocation should be poisoned since there's a raw_ptr alive.
    EXPECT_NE(*wrapped_ptr1, 42);
  }

  // The allocator should not be able to reuse the slot at this point.
  void* raw_ptr2 = allocator_.root()->Alloc(sizeof(int), "");
  EXPECT_NE(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr2));
  allocator_.root()->Free(raw_ptr2);

  // When the last reference is released, the slot should become reusable.
  wrapped_ptr1 = nullptr;
  void* raw_ptr3 = allocator_.root()->Alloc(sizeof(int), "");
  EXPECT_EQ(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr3));
  allocator_.root()->Free(raw_ptr3);
#endif  // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
}
1674
// Wrapping zero-sized allocations in raw_ptr must not crash, even across
// many allocations. The pointers are intentionally never freed here; the
// test only checks construction.
TEST_F(BackupRefPtrTest, ZeroSized) {
  std::vector<raw_ptr<void>> ptrs;
  // Use a reasonable number of elements to fill up the slot span.
  for (int i = 0; i < 128 * 1024; ++i) {
    // Constructing a raw_ptr instance from a zero-sized allocation should
    // not result in a crash.
    ptrs.emplace_back(allocator_.root()->Alloc(0));
  }
}
1684
// Forming a raw_ptr to the one-past-the-end address of an allocation (both by
// initialization and by operator+=) must neither crash nor corrupt the
// partition's free list.
TEST_F(BackupRefPtrTest, EndPointer) {
  // This test requires a fresh partition with an empty free list.
  // Check multiple size buckets and levels of slot filling.
  for (int size = 0; size < 1024; size += sizeof(void*)) {
    // Creating a raw_ptr from an address right past the end of an allocation
    // should not result in a crash or corrupt the free list.
    char* raw_ptr1 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    raw_ptr<char, AllowPtrArithmetic> wrapped_ptr = raw_ptr1 + size;
    wrapped_ptr = nullptr;
    // We need to make two more allocations to turn the possible free list
    // corruption into an observable crash.
    char* raw_ptr2 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    char* raw_ptr3 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));

    // Similarly for operator+=.
    char* raw_ptr4 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    wrapped_ptr = raw_ptr4;
    wrapped_ptr += size;
    wrapped_ptr = nullptr;
    char* raw_ptr5 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    char* raw_ptr6 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));

    allocator_.root()->Free(raw_ptr1);
    allocator_.root()->Free(raw_ptr2);
    allocator_.root()->Free(raw_ptr3);
    allocator_.root()->Free(raw_ptr4);
    allocator_.root()->Free(raw_ptr5);
    allocator_.root()->Free(raw_ptr6);
  }
}
1715
// Verifies BRP quarantine accounting: the quarantined bytes/slots counters
// rise when a still-referenced allocation is freed, and drop back to zero
// once the last raw_ptr releases its reference.
TEST_F(BackupRefPtrTest, QuarantinedBytes) {
  uint64_t* raw_ptr1 = reinterpret_cast<uint64_t*>(
      allocator_.root()->Alloc(sizeof(uint64_t), ""));
  raw_ptr<uint64_t, DisableDanglingPtrDetection> wrapped_ptr1 = raw_ptr1;
  // Nothing has been freed yet, so nothing should be quarantined.
  EXPECT_EQ(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);

  // Memory should get quarantined.
  allocator_.root()->Free(raw_ptr1);
  EXPECT_GT(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            1U);

  // Non quarantined free should not affect total_size_of_brp_quarantined_bytes
  void* raw_ptr2 = allocator_.root()->Alloc(sizeof(uint64_t), "");
  allocator_.root()->Free(raw_ptr2);

  // Freeing quarantined memory should bring the size back down to zero.
  wrapped_ptr1 = nullptr;
  EXPECT_EQ(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);
}
1749
// Exercises raw_ptr arithmetic (+=, -=, +, -, ++, --) on an allocation of
// |requested_size| from |allocator|: in-bounds moves must succeed, the
// one-past-the-end position is tolerated (but poisoned when
// BACKUP_REF_PTR_POISON_OOB_PTR is on), and any step beyond either end of the
// allocation must CHECK-fail. No-op when extra OOB checks are compiled out.
void RunBackupRefPtrImplAdvanceTest(
    partition_alloc::PartitionAllocator& allocator,
    size_t requested_size) {
#if BUILDFLAG(BACKUP_REF_PTR_EXTRA_OOB_CHECKS)
  char* ptr = static_cast<char*>(allocator.root()->Alloc(requested_size));
  raw_ptr<char, AllowPtrArithmetic> protected_ptr = ptr;
  // In-bounds round trips from the start of the allocation.
  protected_ptr += 123;
  protected_ptr -= 123;
  protected_ptr = protected_ptr + 123;
  protected_ptr = protected_ptr - 123;
  protected_ptr += requested_size / 2;
  // end-of-allocation address should not cause an error immediately, but it may
  // result in the pointer being poisoned.
  protected_ptr = protected_ptr + (requested_size + 1) / 2;
#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // Dereferencing the poisoned one-past-the-end pointer must die.
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr = ' ', "");
  protected_ptr -= 1;  // This brings the pointer back within
                       // bounds, which causes the poison to be removed.
  *protected_ptr = ' ';
  protected_ptr += 1;  // Reposition pointer back past end of allocation.
#endif
  // Any step past the one-past-the-end position must CHECK-fail.
  EXPECT_CHECK_DEATH(protected_ptr = protected_ptr + 1);
  EXPECT_CHECK_DEATH(protected_ptr += 1);
  EXPECT_CHECK_DEATH(++protected_ptr);

  // Even though |protected_ptr| is already pointing to the end of the
  // allocation, assign it explicitly to make sure the underlying implementation
  // doesn't "switch" to the next slot.
  protected_ptr = ptr + requested_size;
  protected_ptr -= (requested_size + 1) / 2;
  protected_ptr = protected_ptr - requested_size / 2;
  // |protected_ptr| is now back at the start; stepping below it must die.
  EXPECT_CHECK_DEATH(protected_ptr = protected_ptr - 1);
  EXPECT_CHECK_DEATH(protected_ptr -= 1);
  EXPECT_CHECK_DEATH(--protected_ptr);

#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // An array of a size that doesn't cleanly fit into the allocation. This is to
  // check that one can't access elements that don't fully fit in the
  // allocation.
  const size_t kArraySize = 199;
  ASSERT_LT(kArraySize, requested_size);
  ASSERT_NE(requested_size % kArraySize, 0U);
  typedef char FunkyArray[kArraySize];
  raw_ptr<FunkyArray, AllowPtrArithmetic> protected_arr_ptr =
      reinterpret_cast<FunkyArray*>(ptr);

  **protected_arr_ptr = 4;
  // Advance past the last array element that fits entirely in the allocation;
  // dereferencing there must die because the element straddles the end.
  protected_arr_ptr += requested_size / kArraySize;
  EXPECT_CHECK_DEATH(** protected_arr_ptr = 4);
  protected_arr_ptr--;
  **protected_arr_ptr = 4;
  protected_arr_ptr++;
  EXPECT_CHECK_DEATH(** protected_arr_ptr = 4);
  protected_arr_ptr = nullptr;
#endif  // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)

  protected_ptr = nullptr;
  allocator.root()->Free(ptr);
#endif  // BUILDFLAG(BACKUP_REF_PTR_EXTRA_OOB_CHECKS)
}
1810
// Drives RunBackupRefPtrImplAdvanceTest() across the three allocation
// categories PartitionAlloc handles differently: regular buckets, single-slot
// spans, and direct-mapped allocations.
TEST_F(BackupRefPtrTest, Advance) {
  size_t requested_size = GetRequestSizeThatFills512BSlot();
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);

  // We don't have the same worry for single-slot spans, as PartitionAlloc knows
  // exactly where the allocation ends.
  size_t raw_size = 300003;
  ASSERT_GT(raw_size, partition_alloc::internal::MaxRegularSlotSpanSize());
  ASSERT_LE(raw_size, partition_alloc::internal::kMaxBucketed);
  requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);

  // Same for direct map.
  raw_size = 1001001;
  ASSERT_GT(raw_size, partition_alloc::internal::kMaxBucketed);
  requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);
}
1829
// Verifies that raw_ptr arithmetic is allowed entirely outside the BRP pool,
// but CHECK-fails when an advance would cross the BRP pool boundary in either
// direction.
TEST_F(BackupRefPtrTest, AdvanceAcrossPools) {
  char array1[1000];
  char array2[1000];

  char* in_pool_ptr = static_cast<char*>(allocator_.root()->Alloc(123));

  raw_ptr<char, AllowPtrArithmetic> protected_ptr = array1;
  // Nothing bad happens. Both pointers are outside of the BRP pool, so no
  // checks are triggered.
  protected_ptr += (array2 - array1);
  // A pointer is shifted from outside of the BRP pool into the BRP pool. This
  // should trigger death to avoid ending up with a pointer into an unrelated
  // allocation that isn't covered by the original pointer's protection.
  EXPECT_CHECK_DEATH(protected_ptr += (in_pool_ptr - array2));

  protected_ptr = in_pool_ptr;
  // Same when a pointer is shifted from inside the BRP pool out of it.
  EXPECT_CHECK_DEATH(protected_ptr += (array1 - in_pool_ptr));

  protected_ptr = nullptr;
  allocator_.root()->Free(in_pool_ptr);
}
1851
// Verifies raw_ptr subtraction (operator-): deltas within one allocation
// (including to the one-past-the-end position) are computed correctly, while
// subtracting pointers that belong to different allocations CHECK-fails when
// the pointer-subtraction check is compiled in.
TEST_F(BackupRefPtrTest, GetDeltaElems) {
  size_t requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(512);
  char* ptr1 = static_cast<char*>(allocator_.root()->Alloc(requested_size));
  char* ptr2 = static_cast<char*>(allocator_.root()->Alloc(requested_size));
  ASSERT_LT(ptr1, ptr2);  // There should be a ref-count between slots.
  raw_ptr<char, AllowPtrArithmetic> protected_ptr1 = ptr1;
  raw_ptr<char, AllowPtrArithmetic> protected_ptr1_2 = ptr1 + 1;
  raw_ptr<char, AllowPtrArithmetic> protected_ptr1_3 =
      ptr1 + requested_size - 1;
  raw_ptr<char, AllowPtrArithmetic> protected_ptr1_4 = ptr1 + requested_size;
  raw_ptr<char, AllowPtrArithmetic> protected_ptr2 = ptr2;
  raw_ptr<char, AllowPtrArithmetic> protected_ptr2_2 = ptr2 + 1;

  // Same-allocation deltas, both directions.
  EXPECT_EQ(protected_ptr1_2 - protected_ptr1, 1);
  EXPECT_EQ(protected_ptr1 - protected_ptr1_2, -1);
  EXPECT_EQ(protected_ptr1_3 - protected_ptr1,
            checked_cast<ptrdiff_t>(requested_size) - 1);
  EXPECT_EQ(protected_ptr1 - protected_ptr1_3,
            -checked_cast<ptrdiff_t>(requested_size) + 1);
  EXPECT_EQ(protected_ptr1_4 - protected_ptr1,
            checked_cast<ptrdiff_t>(requested_size));
  EXPECT_EQ(protected_ptr1 - protected_ptr1_4,
            -checked_cast<ptrdiff_t>(requested_size));
#if BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
  // Cross-allocation subtraction must die, in every pointer combination.
  EXPECT_CHECK_DEATH(protected_ptr2 - protected_ptr1);
  EXPECT_CHECK_DEATH(protected_ptr1 - protected_ptr2);
  EXPECT_CHECK_DEATH(protected_ptr2 - protected_ptr1_4);
  EXPECT_CHECK_DEATH(protected_ptr1_4 - protected_ptr2);
  EXPECT_CHECK_DEATH(protected_ptr2_2 - protected_ptr1);
  EXPECT_CHECK_DEATH(protected_ptr1 - protected_ptr2_2);
  EXPECT_CHECK_DEATH(protected_ptr2_2 - protected_ptr1_4);
  EXPECT_CHECK_DEATH(protected_ptr1_4 - protected_ptr2_2);
#endif  // BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
  EXPECT_EQ(protected_ptr2_2 - protected_ptr2, 1);
  EXPECT_EQ(protected_ptr2 - protected_ptr2_2, -1);

  // Release all BRP references before freeing the underlying slots.
  protected_ptr1 = nullptr;
  protected_ptr1_2 = nullptr;
  protected_ptr1_3 = nullptr;
  protected_ptr1_4 = nullptr;
  protected_ptr2 = nullptr;
  protected_ptr2_2 = nullptr;

  allocator_.root()->Free(ptr1);
  allocator_.root()->Free(ptr2);
}
1898
// Volatile sink for reads in death tests below; volatile keeps the compiler
// from optimizing the dereference away.
volatile char g_volatile_char_to_ignore;
1900
// Verifies raw_ptr's operator[]: in-bounds indices work, while negative and
// past-the-end indices CHECK-fail (and index == size dies via OOB poisoning
// when that feature is compiled in).
TEST_F(BackupRefPtrTest, IndexOperator) {
#if BUILDFLAG(BACKUP_REF_PTR_EXTRA_OOB_CHECKS)
  size_t requested_size = GetRequestSizeThatFills512BSlot();
  char* ptr = static_cast<char*>(allocator_.root()->Alloc(requested_size));
  {
    raw_ptr<char, AllowPtrArithmetic> array = ptr;
    std::ignore = array[0];
    std::ignore = array[requested_size - 1];
    EXPECT_CHECK_DEATH(std::ignore = array[-1]);
    EXPECT_CHECK_DEATH(std::ignore = array[requested_size + 1]);
#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
    // Index == size yields the poisoned one-past-the-end pointer; reading
    // through it must die.
    EXPECT_DEATH_IF_SUPPORTED(g_volatile_char_to_ignore = array[requested_size],
                              "");
#endif
  }
  allocator_.root()->Free(ptr);
#endif  // BUILDFLAG(BACKUP_REF_PTR_EXTRA_OOB_CHECKS)
}
1919
IsQuarantineEmpty(partition_alloc::PartitionAllocator & allocator)1920 bool IsQuarantineEmpty(partition_alloc::PartitionAllocator& allocator) {
1921 return allocator.root()->total_size_of_brp_quarantined_bytes.load(
1922 std::memory_order_relaxed) == 0;
1923 }
1924
// Helper whose callbacks destroy the very object they are bound to. Used to
// verify that arguments bound into a callback stay BRP-protected for the
// entire duration of the callback invocation: freeing |this| mid-run should
// quarantine the slot rather than release it.
struct BoundRawPtrTestHelper {
  // Constructs a helper inside |allocator|'s partition via placement new,
  // since plain `new` would allocate from the default partition.
  static BoundRawPtrTestHelper* Create(
      partition_alloc::PartitionAllocator& allocator) {
    return new (allocator.root()->Alloc(sizeof(BoundRawPtrTestHelper), ""))
        BoundRawPtrTestHelper(allocator);
  }

  // Binds both callback flavors to the self-deleting method below, with
  // Unretained(this) as the bound argument under test.
  explicit BoundRawPtrTestHelper(partition_alloc::PartitionAllocator& allocator)
      : owning_allocator(allocator),
        once_callback(
            BindOnce(&BoundRawPtrTestHelper::DeleteItselfAndCheckIfInQuarantine,
                     Unretained(this))),
        repeating_callback(BindRepeating(
            &BoundRawPtrTestHelper::DeleteItselfAndCheckIfInQuarantine,
            Unretained(this))) {}

  // Destroys and frees |this|, then expects a non-empty quarantine —
  // presumably because the in-flight callback still holds a BRP reference to
  // the object (confirm against the Bind internals if this changes).
  void DeleteItselfAndCheckIfInQuarantine() {
    auto& allocator = *owning_allocator;
    EXPECT_TRUE(IsQuarantineEmpty(allocator));

    // Since we use a non-default partition, `delete` has to be simulated.
    this->~BoundRawPtrTestHelper();
    allocator.root()->Free(this);

    EXPECT_FALSE(IsQuarantineEmpty(allocator));
  }

  const raw_ref<partition_alloc::PartitionAllocator> owning_allocator;
  OnceClosure once_callback;
  RepeatingClosure repeating_callback;
};
1956
1957 // Check that bound callback arguments remain protected by BRP for the
1958 // entire duration of a callback invocation.
TEST_F(BackupRefPtrTest, Bind) {
  // This test requires a separate partition; otherwise, unrelated allocations
  // might interfere with `IsQuarantineEmpty`.

  // OnceCallback: rvalue Run().
  auto* object_for_once_callback1 = BoundRawPtrTestHelper::Create(allocator_);
  std::move(object_for_once_callback1->once_callback).Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));

  // RepeatingCallback: rvalue Run().
  auto* object_for_repeating_callback1 =
      BoundRawPtrTestHelper::Create(allocator_);
  std::move(object_for_repeating_callback1->repeating_callback).Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));

  // `RepeatingCallback` has both lvalue and rvalue versions of `Run`.
  auto* object_for_repeating_callback2 =
      BoundRawPtrTestHelper::Create(allocator_);
  object_for_repeating_callback2->repeating_callback.Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));
}
1977
1978 #if PA_CONFIG(IN_SLOT_METADATA_CHECK_COOKIE)
// Verifies that reinterpret_cast'ing a raw pointer slot into a raw_ptr and
// then operating on it trips the in-slot-metadata cookie check when the
// underlying allocation has already been freed.
TEST_F(BackupRefPtrTest, ReinterpretCast) {
  void* ptr = allocator_.root()->Alloc(16);
  allocator_.root()->Free(ptr);

  raw_ptr<void>* wrapped_ptr = reinterpret_cast<raw_ptr<void>*>(&ptr);
  // The reference count cookie check should detect that the allocation has
  // been already freed.
  BASE_EXPECT_DEATH(*wrapped_ptr = nullptr, "");
}
1988 #endif // PA_CONFIG(IN_SLOT_METADATA_CHECK_COOKIE)
1989
1990 // Tests that ref-count management is correct, despite `std::optional` may be
1991 // using `union` underneath.
TEST_F(BackupRefPtrTest, WorksWithOptional) {
  void* ptr = allocator_.root()->Alloc(16);
  auto* ref_count =
      allocator_.root()->InSlotMetadataPointerFromObjectForTesting(ptr);
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  // Engaging the optional with a pointer must take a BRP reference.
  std::optional<raw_ptr<void>> opt = ptr;
  ASSERT_TRUE(opt.has_value());
  EXPECT_TRUE(ref_count->IsAlive() && !ref_count->IsAliveWithNoKnownRefs());

  // reset() destroys the contained raw_ptr, releasing the reference.
  opt.reset();
  ASSERT_TRUE(!opt.has_value());
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  opt = ptr;
  ASSERT_TRUE(opt.has_value());
  EXPECT_TRUE(ref_count->IsAlive() && !ref_count->IsAliveWithNoKnownRefs());

  // Note: assigning nullptr stores a null raw_ptr — the optional remains
  // engaged — but the reference is still released.
  opt = nullptr;
  ASSERT_TRUE(opt.has_value());
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  // Going out of scope must also release the reference.
  {
    std::optional<raw_ptr<void>> opt2 = ptr;
    ASSERT_TRUE(opt2.has_value());
    EXPECT_TRUE(ref_count->IsAlive() && !ref_count->IsAliveWithNoKnownRefs());
  }
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  allocator_.root()->Free(ptr);
}
2023
2024 // Tests that ref-count management is correct, despite `absl::variant` may be
2025 // using `union` underneath.
TEST_F(BackupRefPtrTest, WorksWithVariant) {
  void* ptr = allocator_.root()->Alloc(16);
  auto* ref_count =
      allocator_.root()->InSlotMetadataPointerFromObjectForTesting(ptr);
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  // Holding the pointer alternative must take a BRP reference.
  absl::variant<uintptr_t, raw_ptr<void>> vary = ptr;
  ASSERT_EQ(1u, vary.index());
  EXPECT_TRUE(ref_count->IsAlive() && !ref_count->IsAliveWithNoKnownRefs());

  // Switching to the non-pointer alternative destroys the raw_ptr,
  // releasing the reference.
  vary = 42u;
  ASSERT_EQ(0u, vary.index());
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  vary = ptr;
  ASSERT_EQ(1u, vary.index());
  EXPECT_TRUE(ref_count->IsAlive() && !ref_count->IsAliveWithNoKnownRefs());

  // Note: nullptr assigns a null raw_ptr — the active alternative stays the
  // raw_ptr one — but the reference is still released.
  vary = nullptr;
  ASSERT_EQ(1u, vary.index());
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  // Going out of scope must also release the reference.
  {
    absl::variant<uintptr_t, raw_ptr<void>> vary2 = ptr;
    ASSERT_EQ(1u, vary2.index());
    EXPECT_TRUE(ref_count->IsAlive() && !ref_count->IsAliveWithNoKnownRefs());
  }
  EXPECT_TRUE(ref_count->IsAliveWithNoKnownRefs());

  allocator_.root()->Free(ptr);
}
2057
2058 namespace {
2059
2060 // Install dangling raw_ptr handlers and restore them when going out of scope.
2061 class ScopedInstallDanglingRawPtrChecks {
2062 public:
ScopedInstallDanglingRawPtrChecks()2063 ScopedInstallDanglingRawPtrChecks() {
2064 enabled_feature_list_.InitWithFeaturesAndParameters(
2065 {{features::kPartitionAllocDanglingPtr, {{"mode", "crash"}}}},
2066 {/* disabled_features */});
2067 old_detected_fn_ = partition_alloc::GetDanglingRawPtrDetectedFn();
2068 old_dereferenced_fn_ = partition_alloc::GetDanglingRawPtrReleasedFn();
2069 allocator::InstallDanglingRawPtrChecks();
2070 }
~ScopedInstallDanglingRawPtrChecks()2071 ~ScopedInstallDanglingRawPtrChecks() {
2072 partition_alloc::SetDanglingRawPtrDetectedFn(old_detected_fn_);
2073 partition_alloc::SetDanglingRawPtrReleasedFn(old_dereferenced_fn_);
2074 }
2075
2076 private:
2077 test::ScopedFeatureList enabled_feature_list_;
2078 partition_alloc::DanglingRawPtrDetectedFn* old_detected_fn_;
2079 partition_alloc::DanglingRawPtrReleasedFn* old_dereferenced_fn_;
2080 };
2081
2082 } // namespace
2083
// A raw_ptr explicitly marked DisableDanglingPtrDetection may legitimately
// dangle: neither the Free nor the later release reports anything.
TEST_F(BackupRefPtrTest, RawPtrMayDangle) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator_.root()->Alloc(16);
  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr = ptr;
  allocator_.root()->Free(ptr);  // No dangling raw_ptr reported.
  dangling_ptr = nullptr;        // No dangling raw_ptr reported.
}
2092
// A default raw_ptr that outlives its allocation must trigger the dangling
// pointer crash (with both the free and release stack traces in the message)
// when dangling-pointer checks are compiled in.
TEST_F(BackupRefPtrTest, RawPtrNotDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator_.root()->Alloc(16);
  raw_ptr<void> dangling_ptr = ptr;
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
  BASE_EXPECT_DEATH(
      {
        allocator_.root()->Free(ptr);  // Dangling raw_ptr detected.
        dangling_ptr = nullptr;        // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#else
  allocator_.root()->Free(ptr);
  dangling_ptr = nullptr;
#endif
}
2112
2113 // Check the comparator operators work, even across raw_ptr with different
2114 // dangling policies.
TEST_F(BackupRefPtrTest, DanglingPtrComparison) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr_1 = allocator_.root()->Alloc(16);
  void* ptr_2 = allocator_.root()->Alloc(16);

  // Normalize so that ptr_1 < ptr_2; the ordering assertions below rely on it.
  if (ptr_1 > ptr_2) {
    std::swap(ptr_1, ptr_2);
  }

  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_1 = ptr_1;
  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_2 = ptr_2;
  raw_ptr<void> not_dangling_ptr_1 = ptr_1;
  raw_ptr<void> not_dangling_ptr_2 = ptr_2;

  // Equality and ordering must work across differing dangling policies.
  EXPECT_EQ(dangling_ptr_1, not_dangling_ptr_1);
  EXPECT_EQ(dangling_ptr_2, not_dangling_ptr_2);
  EXPECT_NE(dangling_ptr_1, not_dangling_ptr_2);
  EXPECT_NE(dangling_ptr_2, not_dangling_ptr_1);
  EXPECT_LT(dangling_ptr_1, not_dangling_ptr_2);
  EXPECT_GT(dangling_ptr_2, not_dangling_ptr_1);
  EXPECT_LT(not_dangling_ptr_1, dangling_ptr_2);
  EXPECT_GT(not_dangling_ptr_2, dangling_ptr_1);

  // Release the checked pointers before freeing to avoid dangling reports.
  not_dangling_ptr_1 = nullptr;
  not_dangling_ptr_2 = nullptr;

  allocator_.root()->Free(ptr_1);
  allocator_.root()->Free(ptr_2);
}
2145
2146 // Check the assignment operator works, even across raw_ptr with different
2147 // dangling policies (only `not dangling` -> `dangling` direction is supported).
TEST_F(BackupRefPtrTest, DanglingPtrAssignment) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator_.root()->Alloc(16);

  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr;
  raw_ptr<void> not_dangling_ptr;

  // Assign `not dangling` -> `dangling`, then release the checked pointer so
  // only the may-dangle one survives the Free below.
  not_dangling_ptr = ptr;
  dangling_ptr = not_dangling_ptr;
  not_dangling_ptr = nullptr;

  allocator_.root()->Free(ptr);

  dangling_ptr = nullptr;
}
2164
2165 // Check the copy constructor works, even across raw_ptr with different dangling
2166 // policies (only `not dangling` -> `dangling` direction is supported).
// ("Contructor" typo is in the test name; left as-is so existing test filters
// keep matching.)
TEST_F(BackupRefPtrTest, DanglingPtrCopyContructor) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator_.root()->Alloc(16);

  // Copy-construct `not dangling` -> `dangling`; both are released before the
  // Free, so no report is expected.
  raw_ptr<void> not_dangling_ptr(ptr);
  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr(not_dangling_ptr);

  not_dangling_ptr = nullptr;
  dangling_ptr = nullptr;

  allocator_.root()->Free(ptr);
}
2180
// ExtractAsDangling() hands out the raw pointer for freeing while clearing
// the raw_ptr, so no dangling-pointer report is generated.
TEST_F(BackupRefPtrTest, RawPtrExtractAsDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> ptr =
      static_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
  allocator_.root()->Free(
      ptr.ExtractAsDangling());  // No dangling raw_ptr reported.
  EXPECT_EQ(ptr, nullptr);
}
2190
// Counterpart to RawPtrExtractAsDangling: freeing via get() while the raw_ptr
// is still set must produce the dangling-pointer crash report.
TEST_F(BackupRefPtrTest, RawPtrDeleteWithoutExtractAsDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> ptr =
      static_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
  BASE_EXPECT_DEATH(
      {
        allocator_.root()->Free(ptr.get());  // Dangling raw_ptr detected.
        ptr = nullptr;                       // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#else
  allocator_.root()->Free(ptr.get());
  ptr = nullptr;
#endif  // BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
}
2210
// Verifies that raw_ptr works as an iterator with standard algorithms and
// manual loops, and counts (via CountingRawPtr) exactly which unwrap flavor
// (dereference / extraction / comparison) each usage pattern triggers.
TEST_F(BackupRefPtrTest, SpatialAlgoCompat) {
  size_t requested_size = GetRequestSizeThatFills512BSlot();
  size_t requested_elements = requested_size / sizeof(uint32_t);

  uint32_t* ptr =
      reinterpret_cast<uint32_t*>(allocator_.root()->Alloc(requested_size));
  uint32_t* ptr_end = ptr + requested_elements;

  CountingRawPtr<uint32_t> counting_ptr = ptr;
  CountingRawPtr<uint32_t> counting_ptr_end = counting_ptr + requested_elements;

  RawPtrCountingImpl::ClearCounters();

  // std::generate with counting iterators: one dereference per element.
  uint32_t gen_val = 1;
  std::generate(counting_ptr, counting_ptr_end, [&gen_val]() {
    gen_val ^= gen_val + 1;
    return gen_val;
  });

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = requested_elements,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = (requested_elements + 1) * 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Counting iterator on both sides: read + write = 2 dereferences/element;
  // both loop-condition operands are counting pointers, hence the *2.
  for (CountingRawPtr<uint32_t> counting_ptr_i = counting_ptr;
       counting_ptr_i < counting_ptr_end; counting_ptr_i++) {
    *counting_ptr_i ^= *counting_ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = requested_elements * 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = (requested_elements + 1) * 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Comparing against a plain pointer bound: only one counting operand per
  // comparison.
  for (CountingRawPtr<uint32_t> counting_ptr_i = counting_ptr;
       counting_ptr_i < ptr_end; counting_ptr_i++) {
    *counting_ptr_i ^= *counting_ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = requested_elements * 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = requested_elements + 1,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Plain pointer iteration, counting pointer only as the bound: comparisons
  // only, no counted dereferences.
  for (uint32_t* ptr_i = ptr; ptr_i < counting_ptr_end; ptr_i++) {
    *ptr_i ^= *ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = requested_elements + 1,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Extracting plain pointers up front: exactly two extractions, everything
  // afterwards bypasses raw_ptr entirely.
  size_t iter_cnt = 0;
  for (uint32_t *ptr_i = counting_ptr, *ptr_i_end = counting_ptr_end;
       ptr_i < ptr_i_end; ptr_i++) {
    *ptr_i ^= *ptr_i + 1;
    iter_cnt++;
  }
  EXPECT_EQ(iter_cnt, requested_elements);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 2,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());

  counting_ptr = nullptr;
  counting_ptr_end = nullptr;
  allocator_.root()->Free(ptr);
}
2299
2300 #if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
// Verifies that the OOB poison bit survives both copy construction and copy
// assignment of a poisoned raw_ptr: the copies may exist, but dereferencing
// them must still die.
TEST_F(BackupRefPtrTest, Duplicate) {
  size_t requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(512);
  char* ptr = static_cast<char*>(allocator_.root()->Alloc(requested_size));
  raw_ptr<char, AllowPtrArithmetic> protected_ptr1 = ptr;
  protected_ptr1 += requested_size;  // Pointer should now be poisoned.

  // Duplicating a poisoned pointer should be allowed.
  raw_ptr<char, AllowPtrArithmetic> protected_ptr2 = protected_ptr1;

  // The poison bit should be propagated to the duplicate such that the OOB
  // access is disallowed:
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr2 = ' ', "");

  // Assignment from a poisoned pointer should be allowed.
  raw_ptr<char, AllowPtrArithmetic> protected_ptr3;
  protected_ptr3 = protected_ptr1;

  // The poison bit should be propagated via the assignment such that the OOB
  // access is disallowed:
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr3 = ' ', "");

  protected_ptr1 = nullptr;
  protected_ptr2 = nullptr;
  protected_ptr3 = nullptr;
  allocator_.root()->Free(ptr);
}
2327 #endif // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
2328
2329 #if BUILDFLAG(PA_EXPENSIVE_DCHECKS_ARE_ON)
// Verifies that scribbling over a quarantined slot is detected when the slot
// is actually released (expensive DCHECK builds only).
TEST_F(BackupRefPtrTest, WriteAfterFree) {
  constexpr uint64_t kPayload = 0x1234567890ABCDEF;

  raw_ptr<uint64_t, DisableDanglingPtrDetection> ptr =
      static_cast<uint64_t*>(allocator_.root()->Alloc(sizeof(uint64_t), ""));

  // Now |ptr| should be quarantined.
  allocator_.root()->Free(ptr);

  EXPECT_DEATH_IF_SUPPORTED(
      {
        // Write something different from |kQuarantinedByte|.
        *ptr = kPayload;
        // Write-after-Free should lead to crash
        // on |PartitionAllocFreeForRefCounting|.
        ptr = nullptr;
      },
      "");
}
2349 #endif // BUILDFLAG(PA_EXPENSIVE_DCHECKS_ARE_ON)
2350
2351 namespace {
// Byte used by the quarantine override below; must differ from the default
// pattern so the hook's effect is observable.
constexpr uint8_t kCustomQuarantineByte = 0xff;
static_assert(kCustomQuarantineByte !=
              partition_alloc::internal::kQuarantinedByte);

// Quarantine override hook: fills the freed slot with |kCustomQuarantineByte|
// instead of the default quarantine pattern.
void CustomQuarantineHook(void* address, size_t size) {
  partition_alloc::internal::SecureMemset(address, kCustomQuarantineByte, size);
}
2359 } // namespace
2360
// Verifies that an installed quarantine override hook is actually used to
// fill the slot when a BRP-referenced allocation is freed.
TEST_F(BackupRefPtrTest, QuarantineHook) {
  partition_alloc::PartitionAllocHooks::SetQuarantineOverrideHook(
      CustomQuarantineHook);
  uint8_t* native_ptr =
      static_cast<uint8_t*>(allocator_.root()->Alloc(sizeof(uint8_t), ""));
  *native_ptr = 0;
  {
    raw_ptr<uint8_t, DisableDanglingPtrDetection> smart_ptr = native_ptr;

    allocator_.root()->Free(smart_ptr);
    // Access the allocation through the native pointer to avoid triggering
    // dereference checks in debug builds.
    EXPECT_EQ(*partition_alloc::internal::TagPtr(native_ptr),
              kCustomQuarantineByte);

    // Leaving |smart_ptr| filled with |kCustomQuarantineByte| can
    // cause a crash because we have a DCHECK that expects it to be filled with
    // |kQuarantineByte|. We need to ensure it is unquarantined before
    // unregistering the hook.
  }  // <- unquarantined here

  partition_alloc::PartitionAllocHooks::SetQuarantineOverrideHook(nullptr);
}
2384
// Verifies RawPtrTraits::kDisableBRP: a default raw_ptr keeps its allocation
// quarantined after Free (detectable via the quarantine fill pattern or a
// use-after-free crash), while a kDisableBRP raw_ptr provides no such
// protection and the slot is released immediately.
TEST_F(BackupRefPtrTest, RawPtrTraits_DisableBRP) {
  // Allocate a slot so that a slot span doesn't get decommitted from memory,
  // while we allocate/deallocate/access the tested slot below.
  void* sentinel = allocator_.root()->Alloc(sizeof(unsigned int), "");
  // The quarantine fill byte replicated to 4 bytes, for comparing a whole
  // unsigned int at once.
  constexpr uint32_t kQuarantined2Bytes =
      partition_alloc::internal::kQuarantinedByte |
      (partition_alloc::internal::kQuarantinedByte << 8);
  constexpr uint32_t kQuarantined4Bytes =
      kQuarantined2Bytes | (kQuarantined2Bytes << 16);

  // raw_ptr with BRP enabled (default): the slot stays quarantined.
  {
    raw_ptr<unsigned int, DanglingUntriaged> ptr = static_cast<unsigned int*>(
        allocator_.root()->Alloc(sizeof(unsigned int), ""));
    *ptr = 0;
    // Freeing would update the MTE tag so use |TagPtr()| to dereference it
    // below.
    allocator_.root()->Free(ptr);
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    // Recreate the raw_ptr so we can use a pointer with the updated MTE tag.
    // Reassigning to |ptr| would hit the PartitionRefCount cookie check rather
    // than the |IsPointeeAlive()| check.
    raw_ptr<unsigned int, DanglingUntriaged> dangling_ptr =
        partition_alloc::internal::TagPtr(ptr.get());
    EXPECT_DEATH_IF_SUPPORTED(*dangling_ptr = 0, "");
#else
    EXPECT_EQ(kQuarantined4Bytes,
              *partition_alloc::internal::TagPtr(ptr.get()));
#endif
  }
  // raw_ptr with DisableBRP, BRP is expected to be off.
  {
    raw_ptr<unsigned int, DanglingUntriaged | RawPtrTraits::kDisableBRP> ptr =
        static_cast<unsigned int*>(
            allocator_.root()->Alloc(sizeof(unsigned int), ""));
    *ptr = 0;
    allocator_.root()->Free(ptr);
    // A tad fragile as a new allocation or free-list pointer may be there, but
    // highly unlikely it'll match 4 quarantine bytes in a row.
    // Use |TagPtr()| for this dereference because freeing would have updated
    // the MTE tag.
    EXPECT_NE(kQuarantined4Bytes,
              *partition_alloc::internal::TagPtr(ptr.get()));
  }

  allocator_.root()->Free(sentinel);
}
2431
2432 #endif // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
2433 // !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
2434
2435 #if BUILDFLAG(USE_HOOKABLE_RAW_PTR)
2436
2437 namespace {
// X-macro listing every hookable raw_ptr operation, so counters and adapters
// can be generated mechanically for each one.
#define FOR_EACH_RAW_PTR_OPERATION(F) \
  F(wrap_ptr)                         \
  F(release_wrapped_ptr)              \
  F(safely_unwrap_for_dereference)    \
  F(safely_unwrap_for_extraction)     \
  F(unsafely_unwrap_for_comparison)   \
  F(advance)                          \
  F(duplicate)                        \
  F(wrap_ptr_for_duplication)         \
  F(unsafely_unwrap_for_duplication)

// Can't use gMock to count the number of invocations because
// gMock itself triggers raw_ptr<T> operations.
struct CountingHooks {
  // Zeroes every per-operation counter.
  void ResetCounts() {
#define F(name) name##_count = 0;
    FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
  }

  // Thread-local singleton, so concurrent tests don't see each other's
  // counts.
  static CountingHooks* Get() {
    static thread_local CountingHooks instance;
    return &instance;
  }

  // The adapter method is templated to accept any number of arguments.
#define F(name)                      \
  template <typename... T>           \
  static void name##_adapter(T...) { \
    Get()->name##_count++;           \
  }                                  \
  size_t name##_count = 0;
  FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
};

// Hook table that routes every raw_ptr operation to the counting adapters
// above; installed by HookableRawPtrImplTest.
constexpr RawPtrHooks raw_ptr_hooks{
#define F(name) .name = CountingHooks::name##_adapter,
    FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
};
2479 } // namespace
2480
// Fixture that installs the counting raw_ptr hooks for the duration of each
// test and removes them afterwards.
class HookableRawPtrImplTest : public testing::Test {
 protected:
  void SetUp() override { InstallRawPtrHooks(&raw_ptr_hooks); }
  void TearDown() override { ResetRawPtrHooks(); }
};
2486
TEST_F(HookableRawPtrImplTest, WrapPtr) {
  // Counters are cleared here rather than in `SetUp`: gTest itself performs
  // raw_ptr<T> operations between `SetUp` and the test body, which would
  // pollute the counts.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    [[maybe_unused]] raw_ptr<int> wrapped = raw;
    delete raw;
  }
  // Constructing `wrapped` from a plain pointer is the single wrapping event.
  EXPECT_EQ(CountingHooks::Get()->wrap_ptr_count, 1u);
}
2498
TEST_F(HookableRawPtrImplTest, ReleaseWrappedPtr) {
  // Reset here, not in `SetUp`: gTest triggers raw_ptr<T> operations between
  // `SetUp` and the body.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    [[maybe_unused]] raw_ptr<int> wrapped = raw;
    delete raw;
  }
  // `wrapped` going out of scope releases its wrapped pointer exactly once.
  EXPECT_EQ(CountingHooks::Get()->release_wrapped_ptr_count, 1u);
}
2508
TEST_F(HookableRawPtrImplTest, SafelyUnwrapForDereference) {
  // Reset here, not in `SetUp`: gTest triggers raw_ptr<T> operations between
  // `SetUp` and the body.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> wrapped = raw;
    // Writing through the raw_ptr dereferences it once.
    *wrapped = 1;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->safely_unwrap_for_dereference_count, 1u);
}
2519
TEST_F(HookableRawPtrImplTest, SafelyUnwrapForExtraction) {
  // Reset here, not in `SetUp`: gTest triggers raw_ptr<T> operations between
  // `SetUp` and the body.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> wrapped = raw;
    // Reading the plain pointer back out of `wrapped` is an extraction.
    raw = wrapped;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->safely_unwrap_for_extraction_count, 1u);
}
2530
TEST_F(HookableRawPtrImplTest, UnsafelyUnwrapForComparison) {
  // Reset here, not in `SetUp`: gTest triggers raw_ptr<T> operations between
  // `SetUp` and the body.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> wrapped = raw;
    // Comparing a raw_ptr against a plain pointer unwraps it for comparison.
    EXPECT_EQ(wrapped, raw);
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->unsafely_unwrap_for_comparison_count, 1u);
}
2541
TEST_F(HookableRawPtrImplTest, Advance) {
  // Reset here, not in `SetUp`: gTest triggers raw_ptr<T> operations between
  // `SetUp` and the body.
  CountingHooks::Get()->ResetCounts();
  {
    int* array = new int[10];
    raw_ptr<int, AllowPtrArithmetic> cursor = array;
    // Pointer arithmetic routes through the advance hook.
    cursor += 1;
    delete[] array;
  }
  EXPECT_EQ(CountingHooks::Get()->advance_count, 1u);
}
2552
TEST_F(HookableRawPtrImplTest, Duplicate) {
  // Reset here, not in `SetUp`: gTest triggers raw_ptr<T> operations between
  // `SetUp` and the body.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> original = raw;
    // Same-kind copy construction counts as a duplication.
    raw_ptr<int> copy = original;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->duplicate_count, 1u);
}
2563
TEST_F(HookableRawPtrImplTest, CrossKindCopyConstruction) {
  // Reset here, not in `SetUp`: gTest triggers raw_ptr<T> operations between
  // `SetUp` and the body.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> non_dangling_ptr = raw;
    raw_ptr<int, RawPtrTraits::kMayDangle> dangling_ptr(non_dangling_ptr);
    delete raw;
  }
  // Copying across kinds is not a plain duplication: the pointer is unwrapped
  // from the source kind and re-wrapped for the destination kind instead.
  EXPECT_EQ(CountingHooks::Get()->duplicate_count, 0u);
  EXPECT_EQ(CountingHooks::Get()->wrap_ptr_for_duplication_count, 1u);
  EXPECT_EQ(CountingHooks::Get()->unsafely_unwrap_for_duplication_count, 1u);
}
2576
2577 #endif // BUILDFLAG(USE_HOOKABLE_RAW_PTR)
2578
TEST(DanglingPtrTest, DetectAndReset) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  auto owner = std::make_unique<int>(42);
  raw_ptr<int> stale = owner.get();
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  // Destroying the pointee while `stale` still points at it is the detection.
  owner.reset();
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  // Clearing the raw_ptr releases the dangling reference.
  stale = nullptr;
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2596
TEST(DanglingPtrTest, DetectAndDestructor) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  auto owner = std::make_unique<int>(42);
  {
    [[maybe_unused]] raw_ptr<int> stale = owner.get();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    // Destroying the pointee while `stale` is alive is the detection.
    owner.reset();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  }
  // The raw_ptr's destructor, not an explicit reset, releases the reference.
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2615
TEST(DanglingPtrTest, DetectResetAndDestructor) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  auto owner = std::make_unique<int>(42);
  {
    raw_ptr<int> stale = owner.get();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    owner.reset();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    // Nulling out the raw_ptr releases the dangling reference...
    stale = nullptr;
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
  }
  // ...and the subsequent destructor must not release (or detect) it again.
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2637
2638 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_INSTANCE_TRACER) && \
2639 BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
TEST(RawPtrInstanceTracerTest, CreateAndDestroy) {
  auto owned = std::make_unique<int>(8);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());

  {
    raw_ptr<int> ptr1 = owned.get();
    const auto snapshot = traces();
    EXPECT_THAT(snapshot, SizeIs(1));
    {
      // A second live raw_ptr to the same object records a second trace.
      raw_ptr<int> ptr2 = owned.get();
      EXPECT_THAT(traces(), SizeIs(2));
    }
    // After `ptr2` dies, the traces are back to the earlier snapshot.
    EXPECT_THAT(traces(), Eq(snapshot));
  }
  // No live raw_ptrs remain, so no traces remain.
  EXPECT_THAT(traces(), IsEmpty());
}
2664
TEST(RawPtrInstanceTracerTest, CopyConstruction) {
  auto owned = std::make_unique<int>(8);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());
  {
    raw_ptr<int> original = owned.get();
    EXPECT_THAT(traces(), SizeIs(1));
    {
      // Copy construction creates a second instance, hence a second trace.
      raw_ptr<int> copy = original;
      EXPECT_THAT(traces(), SizeIs(2));
    }
    // The copy's destruction removes its trace only.
    EXPECT_THAT(traces(), SizeIs(1));
  }

  EXPECT_THAT(traces(), IsEmpty());
}
2687
TEST(RawPtrInstanceTracerTest, CopyAssignment) {
  auto owned1 = std::make_unique<int>(8);
  auto owned2 = std::make_unique<int>(9);
  // Shorthand for the traces recorded against a given owner's address.
  const auto traces = [](const auto& owner) {
    return InstanceTracer::GetStackTracesForAddressForTest(owner.get());
  };

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());

  {
    raw_ptr<int> ptr1 = owned1.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), IsEmpty());

    raw_ptr<int> ptr2 = owned2.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), SizeIs(1));

    // Copy assignment re-points `ptr2` at object 1: object 1 now carries two
    // traces and object 2 none.
    ptr2 = ptr1;
    EXPECT_THAT(traces(owned1), SizeIs(2));
    EXPECT_THAT(traces(owned2), IsEmpty());
  }

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());
}
2722
TEST(RawPtrInstanceTracerTest, MoveConstruction) {
  auto owned = std::make_unique<int>(8);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());
  {
    raw_ptr<int> source = owned.get();
    EXPECT_THAT(traces(), SizeIs(1));
    {
      // Move construction transfers the trace instead of adding one.
      raw_ptr<int> destination = std::move(source);
      EXPECT_THAT(traces(), SizeIs(1));
    }
    // `destination` held the only trace; its destruction leaves none.
    EXPECT_THAT(traces(), IsEmpty());
  }
}
2743
TEST(RawPtrInstanceTracerTest, MoveAssignment) {
  auto owned1 = std::make_unique<int>(8);
  auto owned2 = std::make_unique<int>(9);
  // Shorthand for the traces recorded against a given owner's address.
  const auto traces = [](const auto& owner) {
    return InstanceTracer::GetStackTracesForAddressForTest(owner.get());
  };

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());

  {
    raw_ptr<int> ptr1 = owned1.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), IsEmpty());

    raw_ptr<int> ptr2 = owned2.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), SizeIs(1));

    // Move assignment drops `ptr2`'s old trace on object 2 and transfers
    // `ptr1`'s trace on object 1 without duplicating it.
    ptr2 = std::move(ptr1);
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), IsEmpty());
  }

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());
}
2778
TEST(RawPtrInstanceTracerTest, SelfCopy) {
  auto owned = std::make_unique<int>(8);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());

  {
    raw_ptr<int> ptr = owned.get();
    auto& alias = ptr;  // Launders the self-assignment past compiler warnings.
    EXPECT_THAT(traces(), SizeIs(1));

    // Self-copy must neither add nor drop the instance's trace.
    alias = ptr;
    EXPECT_THAT(traces(), SizeIs(1));
  }

  EXPECT_THAT(traces(), IsEmpty());
}
2799
TEST(RawPtrInstanceTracerTest, SelfMove) {
  auto owned = std::make_unique<int>(8);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());

  {
    raw_ptr<int> ptr = owned.get();
    auto& alias = ptr;  // Launders the self-move past compiler warnings.
    EXPECT_THAT(traces(), SizeIs(1));

    // Self-move must neither add nor drop the instance's trace.
    alias = std::move(ptr);
    EXPECT_THAT(traces(), SizeIs(1));
  }

  EXPECT_THAT(traces(), IsEmpty());
}
2820
TEST(RawPtrInstanceTracerTest, ConversionCreateAndDestroy) {
  auto owned = std::make_unique<Derived>(1, 2, 3);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());

  {
    raw_ptr<Base1> ptr1 = owned.get();
    const auto snapshot = traces();
    EXPECT_THAT(snapshot, SizeIs(1));
    {
      // A second raw_ptr — through a different base — records a second trace.
      raw_ptr<Base2> ptr2 = owned.get();
      EXPECT_THAT(traces(), SizeIs(2));
    }
    // After `ptr2` dies, the traces are back to the earlier snapshot.
    EXPECT_THAT(traces(), Eq(snapshot));
  }
  EXPECT_THAT(traces(), IsEmpty());
}
2845
TEST(RawPtrInstanceTracerTest, CopyConversionConstruction) {
  auto owned = std::make_unique<Derived>(1, 2, 3);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());
  {
    raw_ptr<Derived> derived_ptr = owned.get();
    EXPECT_THAT(traces(), SizeIs(1));
    {
      // Converting-copy construction creates a new instance, hence a second
      // trace.
      raw_ptr<Base1> base_ptr = derived_ptr;
      EXPECT_THAT(traces(), SizeIs(2));
    }
    // The copy's destruction removes its trace only.
    EXPECT_THAT(traces(), SizeIs(1));
  }

  EXPECT_THAT(traces(), IsEmpty());
}
2868
TEST(RawPtrInstanceTracerTest, CopyConversionAssignment) {
  auto owned1 = std::make_unique<Derived>(1, 2, 3);
  auto owned2 = std::make_unique<Derived>(4, 5, 6);
  // Shorthand for the traces recorded against a given owner's address.
  const auto traces = [](const auto& owner) {
    return InstanceTracer::GetStackTracesForAddressForTest(owner.get());
  };

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());

  {
    raw_ptr<Derived> derived_ptr = owned1.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), IsEmpty());

    raw_ptr<Base1> base_ptr = owned2.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), SizeIs(1));

    // Converting-copy assignment re-points `base_ptr` at object 1: it now
    // carries two traces while object 2 has none.
    base_ptr = derived_ptr;
    EXPECT_THAT(traces(owned1), SizeIs(2));
    EXPECT_THAT(traces(owned2), IsEmpty());
  }

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());
}
2903
TEST(RawPtrInstanceTracerTest, MoveConversionConstruction) {
  auto owned = std::make_unique<Derived>(1, 2, 3);
  // Shorthand for the traces currently recorded against `owned`'s address.
  const auto traces = [&owned] {
    return InstanceTracer::GetStackTracesForAddressForTest(owned.get());
  };

  EXPECT_THAT(traces(), IsEmpty());
  {
    raw_ptr<Derived> source = owned.get();
    EXPECT_THAT(traces(), SizeIs(1));
    {
      // Converting-move construction transfers the trace rather than adding
      // one.
      raw_ptr<Base1> destination = std::move(source);
      EXPECT_THAT(traces(), SizeIs(1));
    }
    // `destination` held the only trace; its destruction leaves none.
    EXPECT_THAT(traces(), IsEmpty());
  }
}
2924
TEST(RawPtrInstanceTracerTest, MoveConversionAssignment) {
  auto owned1 = std::make_unique<Derived>(1, 2, 3);
  auto owned2 = std::make_unique<Derived>(4, 5, 6);
  // Shorthand for the traces recorded against a given owner's address.
  const auto traces = [](const auto& owner) {
    return InstanceTracer::GetStackTracesForAddressForTest(owner.get());
  };

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());

  {
    raw_ptr<Derived> derived_ptr = owned1.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), IsEmpty());

    raw_ptr<Base1> base_ptr = owned2.get();
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), SizeIs(1));

    // Converting-move assignment drops `base_ptr`'s old trace on object 2 and
    // transfers the trace on object 1 without duplicating it.
    base_ptr = std::move(derived_ptr);
    EXPECT_THAT(traces(owned1), SizeIs(1));
    EXPECT_THAT(traces(owned2), IsEmpty());
  }

  EXPECT_THAT(traces(owned1), IsEmpty());
  EXPECT_THAT(traces(owned2), IsEmpty());
}
2959 #endif
2960
2961 } // namespace base::internal
2962