// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
14
15 #include "absl/container/inlined_vector.h"
16
17 #include <algorithm>
18 #include <cstddef>
19 #include <forward_list>
20 #include <iterator>
21 #include <list>
22 #include <memory>
23 #include <scoped_allocator>
24 #include <sstream>
25 #include <stdexcept>
26 #include <string>
27 #include <utility>
28 #include <vector>
29
30 #include "gmock/gmock.h"
31 #include "gtest/gtest.h"
32 #include "absl/base/attributes.h"
33 #include "absl/base/internal/exception_testing.h"
34 #include "absl/base/macros.h"
35 #include "absl/base/options.h"
36 #include "absl/container/internal/test_allocator.h"
37 #include "absl/container/internal/test_instance_tracker.h"
38 #include "absl/hash/hash_testing.h"
39 #include "absl/log/check.h"
40 #include "absl/memory/memory.h"
41 #include "absl/strings/str_cat.h"
42
43 namespace {
44
45 using absl::container_internal::CountingAllocator;
46 using absl::test_internal::CopyableMovableInstance;
47 using absl::test_internal::CopyableOnlyInstance;
48 using absl::test_internal::InstanceTracker;
49 using testing::AllOf;
50 using testing::Each;
51 using testing::ElementsAre;
52 using testing::ElementsAreArray;
53 using testing::Eq;
54 using testing::Gt;
55 using testing::Pointee;
56 using testing::Pointwise;
57 using testing::PrintToString;
58 using testing::SizeIs;
59
60 using IntVec = absl::InlinedVector<int, 8>;
61
62 MATCHER_P(CapacityIs, n, "") {
63 return testing::ExplainMatchResult(n, arg.capacity(), result_listener);
64 }
65
66 MATCHER_P(ValueIs, e, "") {
67 return testing::ExplainMatchResult(e, arg.value(), result_listener);
68 }
69
70 // TODO(bsamwel): Add support for movable-only types.
71
72 // Test fixture for typed tests on BaseCountedInstance derived classes, see
73 // test_instance_tracker.h.
74 template <typename T>
75 class InstanceTest : public ::testing::Test {};
76 TYPED_TEST_SUITE_P(InstanceTest);
77
78 // A simple reference counted class to make sure that the proper elements are
79 // destroyed in the erase(begin, end) test.
80 class RefCounted {
81 public:
RefCounted(int value,int * count)82 RefCounted(int value, int* count) : value_(value), count_(count) { Ref(); }
83
RefCounted(const RefCounted & v)84 RefCounted(const RefCounted& v) : value_(v.value_), count_(v.count_) {
85 Ref();
86 }
87
~RefCounted()88 ~RefCounted() {
89 Unref();
90 count_ = nullptr;
91 }
92
swap(RefCounted & a,RefCounted & b)93 friend void swap(RefCounted& a, RefCounted& b) {
94 using std::swap;
95 swap(a.value_, b.value_);
96 swap(a.count_, b.count_);
97 }
98
operator =(RefCounted v)99 RefCounted& operator=(RefCounted v) {
100 using std::swap;
101 swap(*this, v);
102 return *this;
103 }
104
Ref() const105 void Ref() const {
106 CHECK_NE(count_, nullptr);
107 ++(*count_);
108 }
109
Unref() const110 void Unref() const {
111 --(*count_);
112 CHECK_GE(*count_, 0);
113 }
114
115 int value_;
116 int* count_;
117 };
118
119 using RefCountedVec = absl::InlinedVector<RefCounted, 8>;
120
121 // A class with a vtable pointer
122 class Dynamic {
123 public:
~Dynamic()124 virtual ~Dynamic() {}
125 };
126
127 using DynamicVec = absl::InlinedVector<Dynamic, 8>;
128
// Append offset..offset+len-1 to *v. Works with any container that has
// push_back(int).
template <typename Container>
static void Fill(Container* v, size_t len, int offset = 0) {
  for (size_t i = 0; i < len; i++) {
    v->push_back(static_cast<int>(i) + offset);
  }
}
136
Fill(size_t len,int offset=0)137 static IntVec Fill(size_t len, int offset = 0) {
138 IntVec v;
139 Fill(&v, len, offset);
140 return v;
141 }
142
// Exercises size/capacity, element access, all iterator flavors, and
// front/back/pop_back for lengths straddling the inlined capacity.
TEST(IntVec, SimpleOps) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v;
    const IntVec& cv = v;  // const alias

    Fill(&v, len);
    EXPECT_EQ(len, v.size());
    EXPECT_LE(len, v.capacity());

    for (size_t i = 0; i < len; i++) {
      EXPECT_EQ(static_cast<int>(i), v[i]);
      EXPECT_EQ(static_cast<int>(i), v.at(i));
    }
    EXPECT_EQ(v.begin(), v.data());
    EXPECT_EQ(cv.begin(), cv.data());

    size_t counter = 0;
    for (IntVec::iterator iter = v.begin(); iter != v.end(); ++iter) {
      EXPECT_EQ(static_cast<int>(counter), *iter);
      counter++;
    }
    EXPECT_EQ(counter, len);

    counter = 0;
    for (IntVec::const_iterator iter = v.begin(); iter != v.end(); ++iter) {
      EXPECT_EQ(static_cast<int>(counter), *iter);
      counter++;
    }
    EXPECT_EQ(counter, len);

    counter = 0;
    for (IntVec::const_iterator iter = v.cbegin(); iter != v.cend(); ++iter) {
      EXPECT_EQ(static_cast<int>(counter), *iter);
      counter++;
    }
    EXPECT_EQ(counter, len);

    if (len > 0) {
      EXPECT_EQ(0, v.front());
      EXPECT_EQ(static_cast<int>(len - 1), v.back());
      v.pop_back();
      EXPECT_EQ(len - 1, v.size());
      for (size_t i = 0; i < v.size(); ++i) {
        EXPECT_EQ(static_cast<int>(i), v[i]);
        EXPECT_EQ(static_cast<int>(i), v.at(i));
      }
    }
  }
}
192
// pop_back on a single-element vector must not underflow the size.
TEST(IntVec, PopBackNoOverflow) {
  IntVec v = {1};
  v.pop_back();
  EXPECT_EQ(v.size(), 0u);
}
198
// at() past the end must fail the bounds check (throws std::out_of_range
// when exceptions are enabled).
TEST(IntVec, AtThrows) {
  IntVec v = {1, 2, 3};
  EXPECT_EQ(v.at(2), 3);
  ABSL_BASE_INTERNAL_EXPECT_FAIL(v.at(3), std::out_of_range,
                                 "failed bounds check");
}
205
// Walks rbegin/rend, const rbegin/rend, and crbegin/crend, checking that
// each visits all elements in reverse order.
TEST(IntVec, ReverseIterator) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v;
    Fill(&v, len);

    size_t counter = len;
    for (IntVec::reverse_iterator iter = v.rbegin(); iter != v.rend(); ++iter) {
      counter--;
      EXPECT_EQ(static_cast<int>(counter), *iter);
    }
    EXPECT_EQ(counter, 0u);

    counter = len;
    for (IntVec::const_reverse_iterator iter = v.rbegin(); iter != v.rend();
         ++iter) {
      counter--;
      EXPECT_EQ(static_cast<int>(counter), *iter);
    }
    EXPECT_EQ(counter, 0u);

    counter = len;
    for (IntVec::const_reverse_iterator iter = v.crbegin(); iter != v.crend();
         ++iter) {
      counter--;
      EXPECT_EQ(static_cast<int>(counter), *iter);
    }
    EXPECT_EQ(counter, 0u);
  }
}
235
// Erasing a single element at every position shifts the tail left by one.
TEST(IntVec, Erase) {
  for (size_t len = 1; len < 20; len++) {
    for (size_t i = 0; i < len; ++i) {
      IntVec v;
      Fill(&v, len);
      v.erase(v.begin() + i);
      EXPECT_EQ(len - 1, v.size());
      for (size_t j = 0; j < i; ++j) {
        EXPECT_EQ(static_cast<int>(j), v[j]);
      }
      for (size_t j = i; j < len - 1; ++j) {
        EXPECT_EQ(static_cast<int>(j + 1), v[j]);
      }
    }
  }
}
252
// In debug or hardened builds, out-of-range operator[] and oversized resize
// must terminate rather than invoke UB.
TEST(IntVec, Hardened) {
  IntVec v;
  Fill(&v, 10);
  EXPECT_EQ(v[9], 9);
#if !defined(NDEBUG) || ABSL_OPTION_HARDENED
  EXPECT_DEATH_IF_SUPPORTED(v[10], "");
  EXPECT_DEATH_IF_SUPPORTED(v[static_cast<size_t>(-1)], "");
  EXPECT_DEATH_IF_SUPPORTED(v.resize(v.max_size() + 1), "");
#endif
}
263
264 // Move construction of a container of unique pointers should work fine, with no
265 // leaks, despite the fact that unique pointers are trivially relocatable but
266 // not trivially destructible.
TEST(UniquePtr,MoveConstruct)267 TEST(UniquePtr, MoveConstruct) {
268 for (size_t size = 0; size < 16; ++size) {
269 SCOPED_TRACE(size);
270
271 absl::InlinedVector<std::unique_ptr<size_t>, 2> a;
272 for (size_t i = 0; i < size; ++i) {
273 a.push_back(std::make_unique<size_t>(i));
274 }
275
276 absl::InlinedVector<std::unique_ptr<size_t>, 2> b(std::move(a));
277
278 ASSERT_THAT(b, SizeIs(size));
279 for (size_t i = 0; i < size; ++i) {
280 ASSERT_THAT(b[i], Pointee(i));
281 }
282 }
283 }
284
285 // Move assignment of a container of unique pointers should work fine, with no
286 // leaks, despite the fact that unique pointers are trivially relocatable but
287 // not trivially destructible.
TEST(UniquePtr,MoveAssign)288 TEST(UniquePtr, MoveAssign) {
289 for (size_t size = 0; size < 16; ++size) {
290 SCOPED_TRACE(size);
291
292 absl::InlinedVector<std::unique_ptr<size_t>, 2> a;
293 for (size_t i = 0; i < size; ++i) {
294 a.push_back(std::make_unique<size_t>(i));
295 }
296
297 absl::InlinedVector<std::unique_ptr<size_t>, 2> b;
298 b = std::move(a);
299
300 ASSERT_THAT(b, SizeIs(size));
301 for (size_t i = 0; i < size; ++i) {
302 ASSERT_THAT(b[i], Pointee(i));
303 }
304 }
305 }
306
307 // Swapping containers of unique pointers should work fine, with no
308 // leaks, despite the fact that unique pointers are trivially relocatable but
309 // not trivially destructible.
310 // TODO(absl-team): Using unique_ptr here is technically correct, but
311 // a trivially relocatable struct would be less semantically confusing.
TEST(UniquePtr,Swap)312 TEST(UniquePtr, Swap) {
313 for (size_t size1 = 0; size1 < 5; ++size1) {
314 for (size_t size2 = 0; size2 < 5; ++size2) {
315 absl::InlinedVector<std::unique_ptr<size_t>, 2> a;
316 absl::InlinedVector<std::unique_ptr<size_t>, 2> b;
317 for (size_t i = 0; i < size1; ++i) {
318 a.push_back(std::make_unique<size_t>(i + 10));
319 }
320 for (size_t i = 0; i < size2; ++i) {
321 b.push_back(std::make_unique<size_t>(i + 20));
322 }
323 a.swap(b);
324 ASSERT_THAT(a, SizeIs(size2));
325 ASSERT_THAT(b, SizeIs(size1));
326 for (size_t i = 0; i < a.size(); ++i) {
327 ASSERT_THAT(a[i], Pointee(i + 20));
328 }
329 for (size_t i = 0; i < b.size(); ++i) {
330 ASSERT_THAT(b[i], Pointee(i + 10));
331 }
332 }
333 }
334 }
335
336 // Erasing from a container of unique pointers should work fine, with no
337 // leaks, despite the fact that unique pointers are trivially relocatable but
338 // not trivially destructible.
339 // TODO(absl-team): Using unique_ptr here is technically correct, but
340 // a trivially relocatable struct would be less semantically confusing.
TEST(UniquePtr,EraseSingle)341 TEST(UniquePtr, EraseSingle) {
342 for (size_t size = 4; size < 16; ++size) {
343 absl::InlinedVector<std::unique_ptr<size_t>, 8> a;
344 for (size_t i = 0; i < size; ++i) {
345 a.push_back(std::make_unique<size_t>(i));
346 }
347 a.erase(a.begin());
348 ASSERT_THAT(a, SizeIs(size - 1));
349 for (size_t i = 0; i < size - 1; ++i) {
350 ASSERT_THAT(a[i], Pointee(i + 1));
351 }
352 a.erase(a.begin() + 2);
353 ASSERT_THAT(a, SizeIs(size - 2));
354 ASSERT_THAT(a[0], Pointee(1));
355 ASSERT_THAT(a[1], Pointee(2));
356 for (size_t i = 2; i < size - 2; ++i) {
357 ASSERT_THAT(a[i], Pointee(i + 2));
358 }
359 }
360 }
361
362 // Erasing from a container of unique pointers should work fine, with no
363 // leaks, despite the fact that unique pointers are trivially relocatable but
364 // not trivially destructible.
365 // TODO(absl-team): Using unique_ptr here is technically correct, but
366 // a trivially relocatable struct would be less semantically confusing.
TEST(UniquePtr,EraseMulti)367 TEST(UniquePtr, EraseMulti) {
368 for (size_t size = 5; size < 16; ++size) {
369 absl::InlinedVector<std::unique_ptr<size_t>, 8> a;
370 for (size_t i = 0; i < size; ++i) {
371 a.push_back(std::make_unique<size_t>(i));
372 }
373 a.erase(a.begin(), a.begin() + 2);
374 ASSERT_THAT(a, SizeIs(size - 2));
375 for (size_t i = 0; i < size - 2; ++i) {
376 ASSERT_THAT(a[i], Pointee(i + 2));
377 }
378 a.erase(a.begin() + 1, a.begin() + 3);
379 ASSERT_THAT(a, SizeIs(size - 4));
380 ASSERT_THAT(a[0], Pointee(2));
381 for (size_t i = 1; i < size - 4; ++i) {
382 ASSERT_THAT(a[i], Pointee(i + 4));
383 }
384 }
385 }
386
387 // At the end of this test loop, the elements between [erase_begin, erase_end)
388 // should have reference counts == 0, and all others elements should have
389 // reference counts == 1.
TEST(RefCountedVec,EraseBeginEnd)390 TEST(RefCountedVec, EraseBeginEnd) {
391 for (size_t len = 1; len < 20; ++len) {
392 for (size_t erase_begin = 0; erase_begin < len; ++erase_begin) {
393 for (size_t erase_end = erase_begin; erase_end <= len; ++erase_end) {
394 std::vector<int> counts(len, 0);
395 RefCountedVec v;
396 for (size_t i = 0; i < len; ++i) {
397 v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
398 }
399
400 size_t erase_len = erase_end - erase_begin;
401
402 v.erase(v.begin() + erase_begin, v.begin() + erase_end);
403
404 EXPECT_EQ(len - erase_len, v.size());
405
406 // Check the elements before the first element erased.
407 for (size_t i = 0; i < erase_begin; ++i) {
408 EXPECT_EQ(static_cast<int>(i), v[i].value_);
409 }
410
411 // Check the elements after the first element erased.
412 for (size_t i = erase_begin; i < v.size(); ++i) {
413 EXPECT_EQ(static_cast<int>(i + erase_len), v[i].value_);
414 }
415
416 // Check that the elements at the beginning are preserved.
417 for (size_t i = 0; i < erase_begin; ++i) {
418 EXPECT_EQ(1, counts[i]);
419 }
420
421 // Check that the erased elements are destroyed
422 for (size_t i = erase_begin; i < erase_end; ++i) {
423 EXPECT_EQ(0, counts[i]);
424 }
425
426 // Check that the elements at the end are preserved.
427 for (size_t i = erase_end; i < len; ++i) {
428 EXPECT_EQ(1, counts[i]);
429 }
430 }
431 }
432 }
433 }
434
// Minimal types used to verify InlinedVector's requirements on its element
// type: no default constructor, no copy, no assignment, and move-only.
struct NoDefaultCtor {
  explicit NoDefaultCtor(int) {}
};
struct NoCopy {
  NoCopy() {}
  NoCopy(const NoCopy&) = delete;
};
struct NoAssign {
  NoAssign() {}
  NoAssign& operator=(const NoAssign&) = delete;
};
struct MoveOnly {
  MoveOnly() {}
  MoveOnly(MoveOnly&&) = default;
  MoveOnly& operator=(MoveOnly&&) = default;
};
// Elements without a default constructor work via fill-construction.
TEST(InlinedVectorTest, NoDefaultCtor) {
  absl::InlinedVector<NoDefaultCtor, 1> v(10, NoDefaultCtor(2));
  (void)v;
}
// Non-copyable elements work via default-construction.
TEST(InlinedVectorTest, NoCopy) {
  absl::InlinedVector<NoCopy, 1> v(10);
  (void)v;
}
// Non-assignable elements work via default-construction.
TEST(InlinedVectorTest, NoAssign) {
  absl::InlinedVector<NoAssign, 1> v(10);
  (void)v;
}
// Move-only elements support push_back, erase, insert, and emplace.
TEST(InlinedVectorTest, MoveOnly) {
  absl::InlinedVector<MoveOnly, 2> v;
  v.push_back(MoveOnly{});
  v.push_back(MoveOnly{});
  v.push_back(MoveOnly{});
  v.erase(v.begin());
  v.push_back(MoveOnly{});
  v.erase(v.begin(), v.begin() + 1);
  v.insert(v.begin(), MoveOnly{});
  v.emplace(v.begin());
  v.emplace(v.begin(), MoveOnly{});
}
// Move construction is noexcept iff the element's move is (modulo the
// allocator's nothrow guarantee).
TEST(InlinedVectorTest, Noexcept) {
  EXPECT_TRUE(std::is_nothrow_move_constructible<IntVec>::value);
  EXPECT_TRUE((std::is_nothrow_move_constructible<
               absl::InlinedVector<MoveOnly, 2>>::value));

  struct MoveCanThrow {
    MoveCanThrow(MoveCanThrow&&) {}
  };
  EXPECT_EQ(absl::default_allocator_is_nothrow::value,
            (std::is_nothrow_move_constructible<
                absl::InlinedVector<MoveCanThrow, 2>>::value));
}
487
// emplace_back returns a reference to the new element, both while inlined
// and after spilling to heap allocation.
TEST(InlinedVectorTest, EmplaceBack) {
  absl::InlinedVector<std::pair<std::string, int>, 1> v;

  auto& inlined_element = v.emplace_back("answer", 42);
  EXPECT_EQ(&inlined_element, &v[0]);
  EXPECT_EQ(inlined_element.first, "answer");
  EXPECT_EQ(inlined_element.second, 42);

  auto& allocated_element = v.emplace_back("taxicab", 1729);
  EXPECT_EQ(&allocated_element, &v[1]);
  EXPECT_EQ(allocated_element.first, "taxicab");
  EXPECT_EQ(allocated_element.second, 1729);
}
501
// shrink_to_fit reduces capacity to max(size, inlined capacity) as the
// vector grows past its inline storage.
TEST(InlinedVectorTest, ShrinkToFitGrowingVector) {
  absl::InlinedVector<std::pair<std::string, int>, 1> v;

  v.shrink_to_fit();
  EXPECT_EQ(v.capacity(), 1u);

  v.emplace_back("answer", 42);
  v.shrink_to_fit();
  EXPECT_EQ(v.capacity(), 1u);

  v.emplace_back("taxicab", 1729);
  EXPECT_GE(v.capacity(), 2u);
  v.shrink_to_fit();
  EXPECT_EQ(v.capacity(), 2u);

  v.reserve(100);
  EXPECT_GE(v.capacity(), 100u);
  v.shrink_to_fit();
  EXPECT_EQ(v.capacity(), 2u);
}
522
// shrink_to_fit at and around the inlined-capacity boundary: shrinking back
// into inline storage, to exactly the inline capacity, and just past it.
TEST(InlinedVectorTest, ShrinkToFitEdgeCases) {
  {
    absl::InlinedVector<std::pair<std::string, int>, 1> v;
    v.emplace_back("answer", 42);
    v.emplace_back("taxicab", 1729);
    EXPECT_GE(v.capacity(), 2u);
    v.pop_back();
    v.shrink_to_fit();
    EXPECT_EQ(v.capacity(), 1u);
    EXPECT_EQ(v[0].first, "answer");
    EXPECT_EQ(v[0].second, 42);
  }

  {
    absl::InlinedVector<std::string, 2> v(100);
    v.resize(0);
    v.shrink_to_fit();
    EXPECT_EQ(v.capacity(), 2u);  // inlined capacity
  }

  {
    absl::InlinedVector<std::string, 2> v(100);
    v.resize(1);
    v.shrink_to_fit();
    EXPECT_EQ(v.capacity(), 2u);  // inlined capacity
  }

  {
    absl::InlinedVector<std::string, 2> v(100);
    v.resize(2);
    v.shrink_to_fit();
    EXPECT_EQ(v.capacity(), 2u);
  }

  {
    absl::InlinedVector<std::string, 2> v(100);
    v.resize(3);
    v.shrink_to_fit();
    EXPECT_EQ(v.capacity(), 3u);
  }
}
564
// Compares every insert overload against std::vector at every position:
// single element, count+value, random-access / forward / input iterator
// ranges, and initializer lists.
TEST(IntVec, Insert) {
  for (size_t len = 0; len < 20; len++) {
    for (ptrdiff_t pos = 0; pos <= static_cast<ptrdiff_t>(len); pos++) {
      {
        // Single element
        std::vector<int> std_v;
        Fill(&std_v, len);
        IntVec v;
        Fill(&v, len);

        std_v.insert(std_v.begin() + pos, 9999);
        IntVec::iterator it = v.insert(v.cbegin() + pos, 9999);
        EXPECT_THAT(v, ElementsAreArray(std_v));
        EXPECT_EQ(it, v.cbegin() + pos);
      }
      {
        // n elements
        std::vector<int> std_v;
        Fill(&std_v, len);
        IntVec v;
        Fill(&v, len);

        IntVec::size_type n = 5;
        std_v.insert(std_v.begin() + pos, n, 9999);
        IntVec::iterator it = v.insert(v.cbegin() + pos, n, 9999);
        EXPECT_THAT(v, ElementsAreArray(std_v));
        EXPECT_EQ(it, v.cbegin() + pos);
      }
      {
        // Iterator range (random access iterator)
        std::vector<int> std_v;
        Fill(&std_v, len);
        IntVec v;
        Fill(&v, len);

        const std::vector<int> input = {9999, 8888, 7777};
        std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
        IntVec::iterator it =
            v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
        EXPECT_THAT(v, ElementsAreArray(std_v));
        EXPECT_EQ(it, v.cbegin() + pos);
      }
      {
        // Iterator range (forward iterator)
        std::vector<int> std_v;
        Fill(&std_v, len);
        IntVec v;
        Fill(&v, len);

        const std::forward_list<int> input = {9999, 8888, 7777};
        std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
        IntVec::iterator it =
            v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
        EXPECT_THAT(v, ElementsAreArray(std_v));
        EXPECT_EQ(it, v.cbegin() + pos);
      }
      {
        // Iterator range (input iterator)
        std::vector<int> std_v;
        Fill(&std_v, len);
        IntVec v;
        Fill(&v, len);

        std_v.insert(std_v.begin() + pos, {9999, 8888, 7777});
        std::istringstream input("9999 8888 7777");
        IntVec::iterator it =
            v.insert(v.cbegin() + pos, std::istream_iterator<int>(input),
                     std::istream_iterator<int>());
        EXPECT_THAT(v, ElementsAreArray(std_v));
        EXPECT_EQ(it, v.cbegin() + pos);
      }
      {
        // Initializer list
        std::vector<int> std_v;
        Fill(&std_v, len);
        IntVec v;
        Fill(&v, len);

        std_v.insert(std_v.begin() + pos, {9999, 8888});
        IntVec::iterator it = v.insert(v.cbegin() + pos, {9999, 8888});
        EXPECT_THAT(v, ElementsAreArray(std_v));
        EXPECT_EQ(it, v.cbegin() + pos);
      }
    }
  }
}
651
// Make sure the proper construction/destruction happen during insert
// operations: after inserting a copy, every original element still has
// exactly one live instance and the inserted value has exactly two
// (the local plus the copy in the vector).
TEST(RefCountedVec, InsertConstructorDestructor) {
  for (size_t len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    for (size_t pos = 0; pos <= len; pos++) {
      SCOPED_TRACE(pos);
      std::vector<int> counts(len, 0);
      int inserted_count = 0;
      RefCountedVec v;
      for (size_t i = 0; i < len; ++i) {
        SCOPED_TRACE(i);
        v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
      }

      EXPECT_THAT(counts, Each(Eq(1)));

      RefCounted insert_element(9999, &inserted_count);
      EXPECT_EQ(1, inserted_count);
      v.insert(v.begin() + pos, insert_element);
      EXPECT_EQ(2, inserted_count);
      // Check that the elements at the end are preserved.
      EXPECT_THAT(counts, Each(Eq(1)));
      EXPECT_EQ(2, inserted_count);
    }
  }
}
679
// resize with a fill value: growing appends copies of the value, shrinking
// preserves the original prefix.
TEST(IntVec, Resize) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v;
    Fill(&v, len);

    // Try resizing up and down by k elements
    static const int kResizeElem = 1000000;
    for (size_t k = 0; k < 10; k++) {
      // Enlarging resize
      v.resize(len + k, kResizeElem);
      EXPECT_EQ(len + k, v.size());
      EXPECT_LE(len + k, v.capacity());
      for (size_t i = 0; i < len + k; i++) {
        if (i < len) {
          EXPECT_EQ(static_cast<int>(i), v[i]);
        } else {
          EXPECT_EQ(kResizeElem, v[i]);
        }
      }

      // Shrinking resize
      v.resize(len, kResizeElem);
      EXPECT_EQ(len, v.size());
      EXPECT_LE(len, v.capacity());
      for (size_t i = 0; i < len; i++) {
        EXPECT_EQ(static_cast<int>(i), v[i]);
      }
    }
  }
}
710
// The (count, value) constructor fills with `value`.
TEST(IntVec, InitWithLength) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v(len, 7);
    EXPECT_EQ(len, v.size());
    EXPECT_LE(len, v.capacity());
    for (size_t i = 0; i < len; i++) {
      EXPECT_EQ(7, v[i]);
    }
  }
}
721
// Copy construction and copy assignment (including assignment over vectors
// of varying pre-existing length) produce equal contents.
TEST(IntVec, CopyConstructorAndAssignment) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v;
    Fill(&v, len);
    EXPECT_EQ(len, v.size());
    EXPECT_LE(len, v.capacity());

    IntVec v2(v);
    EXPECT_TRUE(v == v2) << PrintToString(v) << PrintToString(v2);

    for (size_t start_len = 0; start_len < 20; start_len++) {
      IntVec v3;
      Fill(&v3, start_len, 99);  // Add dummy elements that should go away
      v3 = v;
      EXPECT_TRUE(v == v3) << PrintToString(v) << PrintToString(v3);
    }
  }
}
740
// Self-assignment must be a no-op (the *& indirection avoids a compiler
// warning about direct self-assignment).
TEST(IntVec, AliasingCopyAssignment) {
  for (size_t len = 0; len < 20; ++len) {
    IntVec original;
    Fill(&original, len);
    IntVec dup = original;
    dup = *&dup;
    EXPECT_EQ(dup, original);
  }
}
750
// Move construction/assignment: a heap-allocated source transfers its
// allocation (data pointer is preserved); an inlined source must copy/move
// element storage (data pointer differs).
TEST(IntVec, MoveConstructorAndAssignment) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v_in;
    const size_t inlined_capacity = v_in.capacity();
    Fill(&v_in, len);
    EXPECT_EQ(len, v_in.size());
    EXPECT_LE(len, v_in.capacity());

    {
      IntVec v_temp(v_in);
      auto* old_data = v_temp.data();
      IntVec v_out(std::move(v_temp));
      EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
      if (v_in.size() > inlined_capacity) {
        // Allocation is moved as a whole, data stays in place.
        EXPECT_TRUE(v_out.data() == old_data);
      } else {
        EXPECT_FALSE(v_out.data() == old_data);
      }
    }
    for (size_t start_len = 0; start_len < 20; start_len++) {
      IntVec v_out;
      Fill(&v_out, start_len, 99);  // Add dummy elements that should go away
      IntVec v_temp(v_in);
      auto* old_data = v_temp.data();
      v_out = std::move(v_temp);
      EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
      if (v_in.size() > inlined_capacity) {
        // Allocation is moved as a whole, data stays in place.
        EXPECT_TRUE(v_out.data() == old_data);
      } else {
        EXPECT_FALSE(v_out.data() == old_data);
      }
    }
  }
}
787
// A value type whose destructor must run (it owns heap memory), used to
// exercise element destruction paths. Equality compares the pointed-to ints.
class NotTriviallyDestructible {
 public:
  NotTriviallyDestructible() : p_(new int(1)) {}
  explicit NotTriviallyDestructible(int i) : p_(new int(i)) {}

  NotTriviallyDestructible(const NotTriviallyDestructible& other)
      : p_(new int(*other.p_)) {}

  NotTriviallyDestructible& operator=(const NotTriviallyDestructible& other) {
    // std::make_unique for consistency with the rest of this file.
    p_ = std::make_unique<int>(*other.p_);
    return *this;
  }

  bool operator==(const NotTriviallyDestructible& other) const {
    return *p_ == *other.p_;
  }

 private:
  std::unique_ptr<int> p_;
};
808
// emplace with an argument that aliases an existing element (which the
// insertion shifts) must still copy the correct value.
TEST(AliasingTest, Emplace) {
  for (size_t i = 2; i < 20; ++i) {
    absl::InlinedVector<NotTriviallyDestructible, 10> vec;
    for (size_t j = 0; j < i; ++j) {
      vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
    }
    vec.emplace(vec.begin(), vec[0]);
    EXPECT_EQ(vec[0], vec[1]);
    vec.emplace(vec.begin() + i / 2, vec[i / 2]);
    EXPECT_EQ(vec[i / 2], vec[i / 2 + 1]);
    vec.emplace(vec.end() - 1, vec.back());
    EXPECT_EQ(vec[vec.size() - 2], vec.back());
  }
}
823
// insert(pos, n, value) where `value` aliases an element that the insertion
// invalidates must still insert n correct copies.
TEST(AliasingTest, InsertWithCount) {
  for (size_t i = 1; i < 20; ++i) {
    absl::InlinedVector<NotTriviallyDestructible, 10> vec;
    for (size_t j = 0; j < i; ++j) {
      vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
    }
    for (size_t n = 0; n < 5; ++n) {
      // We use back where we can because it's guaranteed to become invalidated
      vec.insert(vec.begin(), n, vec.back());
      auto b = vec.begin();
      EXPECT_TRUE(
          std::all_of(b, b + n, [&vec](const NotTriviallyDestructible& x) {
            return x == vec.back();
          }));

      auto m_idx = vec.size() / 2;
      vec.insert(vec.begin() + m_idx, n, vec.back());
      auto m = vec.begin() + m_idx;
      EXPECT_TRUE(
          std::all_of(m, m + n, [&vec](const NotTriviallyDestructible& x) {
            return x == vec.back();
          }));

      // We want distinct values so the equality test is meaningful,
      // vec[vec.size() - 1] is also almost always invalidated.
      auto old_e = vec.size() - 1;
      auto val = vec[old_e];
      vec.insert(vec.end(), n, vec[old_e]);
      auto e = vec.begin() + old_e;
      EXPECT_TRUE(std::all_of(
          e, e + n,
          [&val](const NotTriviallyDestructible& x) { return x == val; }));
    }
  }
}
859
TEST(OverheadTest, Storage) {
  // Check for size overhead.
  // In particular, ensure that std::allocator doesn't cost anything to store.
  // The union should be absorbing some of the allocation bookkeeping overhead
  // in the larger vectors, leaving only the size_ field as overhead.

  struct T {
    void* val;
  };
  size_t expected_overhead = sizeof(T);

  EXPECT_EQ((2 * expected_overhead),
            sizeof(absl::InlinedVector<T, 1>) - sizeof(T[1]));
  EXPECT_EQ(expected_overhead,
            sizeof(absl::InlinedVector<T, 2>) - sizeof(T[2]));
  EXPECT_EQ(expected_overhead,
            sizeof(absl::InlinedVector<T, 3>) - sizeof(T[3]));
  EXPECT_EQ(expected_overhead,
            sizeof(absl::InlinedVector<T, 4>) - sizeof(T[4]));
  EXPECT_EQ(expected_overhead,
            sizeof(absl::InlinedVector<T, 5>) - sizeof(T[5]));
  EXPECT_EQ(expected_overhead,
            sizeof(absl::InlinedVector<T, 6>) - sizeof(T[6]));
  EXPECT_EQ(expected_overhead,
            sizeof(absl::InlinedVector<T, 7>) - sizeof(T[7]));
  EXPECT_EQ(expected_overhead,
            sizeof(absl::InlinedVector<T, 8>) - sizeof(T[8]));
}
888
// clear() empties the vector and leaves begin() == end().
TEST(IntVec, Clear) {
  for (size_t len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    IntVec v;
    Fill(&v, len);
    v.clear();
    EXPECT_EQ(0u, v.size());
    EXPECT_EQ(v.begin(), v.end());
  }
}
899
// reserve() never shrinks; after reserving, filling up to the reserved size
// must not reallocate (data pointer stays stable).
TEST(IntVec, Reserve) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v;
    Fill(&v, len);

    for (size_t newlen = 0; newlen < 100; newlen++) {
      const int* start_rep = v.data();
      v.reserve(newlen);
      const int* final_rep = v.data();
      if (newlen <= len) {
        EXPECT_EQ(start_rep, final_rep);
      }
      EXPECT_LE(newlen, v.capacity());

      // Filling up to newlen should not change rep
      while (v.size() < newlen) {
        v.push_back(0);
      }
      EXPECT_EQ(final_rep, v.data());
    }
  }
}
922
// push_back(v.back()) — the argument aliases an element that reallocation
// may invalidate; behavior must match std::vector.
TEST(StringVec, SelfRefPushBack) {
  std::vector<std::string> std_v;
  absl::InlinedVector<std::string, 4> v;
  const std::string s = "A quite long string to ensure heap.";
  std_v.push_back(s);
  v.push_back(s);
  for (int i = 0; i < 20; ++i) {
    EXPECT_THAT(v, ElementsAreArray(std_v));

    v.push_back(v.back());
    std_v.push_back(std_v.back());
  }
  EXPECT_THAT(v, ElementsAreArray(std_v));
}
937
// push_back(std::move(v.back())) — moving from an aliasing element must
// match std::vector's behavior.
TEST(StringVec, SelfRefPushBackWithMove) {
  std::vector<std::string> std_v;
  absl::InlinedVector<std::string, 4> v;
  const std::string s = "A quite long string to ensure heap.";
  std_v.push_back(s);
  v.push_back(s);
  for (int i = 0; i < 20; ++i) {
    EXPECT_EQ(v.back(), std_v.back());

    v.push_back(std::move(v.back()));
    std_v.push_back(std::move(std_v.back()));
  }
  EXPECT_EQ(v.back(), std_v.back());
}
952
// Self-move-assignment must leave the vector in a valid (but unspecified)
// state — copying it afterwards must be safe.
TEST(StringVec, SelfMove) {
  const std::string s = "A quite long string to ensure heap.";
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    absl::InlinedVector<std::string, 8> v;
    for (int i = 0; i < len; ++i) {
      SCOPED_TRACE(i);
      v.push_back(s);
    }
    // Indirection necessary to avoid compiler warning.
    v = std::move(*(&v));
    // Ensure that the inlined vector is still in a valid state by copying it.
    // We don't expect specific contents since a self-move results in an
    // unspecified valid state.
    std::vector<std::string> copy(v.begin(), v.end());
  }
}
970
// ADL swap of two vectors of every length combination exchanges contents.
TEST(IntVec, Swap) {
  for (size_t l1 = 0; l1 < 20; l1++) {
    SCOPED_TRACE(l1);
    for (size_t l2 = 0; l2 < 20; l2++) {
      SCOPED_TRACE(l2);
      IntVec a = Fill(l1, 0);
      IntVec b = Fill(l2, 100);
      {
        using std::swap;
        swap(a, b);
      }
      EXPECT_EQ(l1, b.size());
      EXPECT_EQ(l2, a.size());
      for (size_t i = 0; i < l1; i++) {
        SCOPED_TRACE(i);
        EXPECT_EQ(static_cast<int>(i), b[i]);
      }
      for (size_t i = 0; i < l2; i++) {
        SCOPED_TRACE(i);
        EXPECT_EQ(100 + static_cast<int>(i), a[i]);
      }
    }
  }
}
995
// Swap with instance counting: verifies not only the resulting contents but
// also HOW the swap happened (allocation transfer vs element-wise swap/move)
// depending on whether each side is inlined or heap-allocated.
TYPED_TEST_P(InstanceTest, Swap) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  for (size_t l1 = 0; l1 < 20; l1++) {
    SCOPED_TRACE(l1);
    for (size_t l2 = 0; l2 < 20; l2++) {
      SCOPED_TRACE(l2);
      InstanceTracker tracker;
      InstanceVec a, b;
      const size_t inlined_capacity = a.capacity();
      auto min_len = std::min(l1, l2);
      auto max_len = std::max(l1, l2);
      for (size_t i = 0; i < l1; i++)
        a.push_back(Instance(static_cast<int>(i)));
      for (size_t i = 0; i < l2; i++)
        b.push_back(Instance(100 + static_cast<int>(i)));
      EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
      tracker.ResetCopiesMovesSwaps();
      {
        using std::swap;
        swap(a, b);
      }
      EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
      if (a.size() > inlined_capacity && b.size() > inlined_capacity) {
        EXPECT_EQ(tracker.swaps(), 0);  // Allocations are swapped.
        EXPECT_EQ(tracker.moves(), 0);
      } else if (a.size() <= inlined_capacity && b.size() <= inlined_capacity) {
        EXPECT_EQ(tracker.swaps(), static_cast<int>(min_len));
        EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
                  static_cast<int>(max_len - min_len));
      } else {
        // One is allocated and the other isn't. The allocation is transferred
        // without copying elements, and the inlined instances are copied/moved.
        EXPECT_EQ(tracker.swaps(), 0);
        EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
                  static_cast<int>(min_len));
      }

      EXPECT_EQ(l1, b.size());
      EXPECT_EQ(l2, a.size());
      for (size_t i = 0; i < l1; i++) {
        EXPECT_EQ(static_cast<int>(i), b[i].value());
      }
      for (size_t i = 0; i < l2; i++) {
        EXPECT_EQ(100 + static_cast<int>(i), a[i].value());
      }
    }
  }
}
1045
// Exercises operator== and operator!= together through a helper so the two
// operators can never drift out of agreement: empty vectors, differing sizes,
// differing elements, and a 100-element perturb/restore sweep.
TEST(IntVec, EqualAndNotEqual) {
  IntVec a, b;
  // Asserts both operators at once: `same` is the expected value of (a == b).
  auto expect_equality = [&](bool same) {
    EXPECT_EQ(same, a == b);
    EXPECT_EQ(!same, a != b);
  };
  expect_equality(true);  // Two empty vectors compare equal.

  a.push_back(3);
  expect_equality(false);  // Size mismatch.

  b.push_back(3);
  expect_equality(true);

  b.push_back(7);
  expect_equality(false);

  a.push_back(6);
  expect_equality(false);  // Same size, last elements differ.

  a.clear();
  b.clear();
  for (size_t i = 0; i < 100; i++) {
    a.push_back(static_cast<int>(i));
    b.push_back(static_cast<int>(i));
    expect_equality(true);

    b[i] = b[i] + 1;  // Perturb a single element.
    expect_equality(false);

    b[i] = b[i] - 1;  // Back to before
    expect_equality(true);
  }
}
1084
// Checks all six relational operators, first on two equal (empty) vectors and
// then on a strictly ordered pair, via a helper that asserts the full set of
// comparisons implied by the ordering.
TEST(IntVec, RelationalOps) {
  // `strict` is true when lo is strictly less than hi, false when lo == hi.
  auto expect_ordering = [](const IntVec& lo, const IntVec& hi, bool strict) {
    EXPECT_EQ(strict, lo < hi);
    EXPECT_FALSE(hi < lo);
    EXPECT_FALSE(lo > hi);
    EXPECT_EQ(strict, hi > lo);
    EXPECT_TRUE(lo <= hi);
    EXPECT_EQ(!strict, hi <= lo);
    EXPECT_EQ(!strict, lo >= hi);
    EXPECT_TRUE(hi >= lo);
  };

  IntVec a, b;
  expect_ordering(a, b, /*strict=*/false);  // Both empty: equal.
  b.push_back(3);
  expect_ordering(a, b, /*strict=*/true);  // Empty sorts before non-empty.
}
1105
// Tracks constructor/destructor balance through push_back, resize in both
// directions, reserve, pop_back, and erase, checking live-instance counts and
// copy/move counts at each step.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructors) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (size_t len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    tracker.ResetCopiesMovesSwaps();

    InstanceVec v;
    const size_t inlined_capacity = v.capacity();
    for (size_t i = 0; i < len; i++) {
      v.push_back(Instance(static_cast<int>(i)));
    }
    EXPECT_EQ(tracker.instances(), static_cast<int>(len));
    EXPECT_GE(tracker.copies() + tracker.moves(),
              static_cast<int>(len));  // More due to reallocation.
    tracker.ResetCopiesMovesSwaps();

    // Enlarging resize() must construct some objects
    tracker.ResetCopiesMovesSwaps();
    v.resize(len + 10, Instance(100));
    EXPECT_EQ(tracker.instances(), static_cast<int>(len) + 10);
    if (len <= inlined_capacity && len + 10 > inlined_capacity) {
      // Crossing from inline to allocated storage: each existing element is
      // relocated exactly once in addition to the 10 new copies.
      EXPECT_EQ(tracker.copies() + tracker.moves(), 10 + static_cast<int>(len));
    } else {
      // Only specify a minimum number of copies + moves. We don't want to
      // depend on the reallocation policy here.
      EXPECT_GE(tracker.copies() + tracker.moves(),
                10);  // More due to reallocation.
    }

    // Shrinking resize() must destroy some objects
    tracker.ResetCopiesMovesSwaps();
    v.resize(len, Instance(100));
    EXPECT_EQ(tracker.instances(), static_cast<int>(len));
    EXPECT_EQ(tracker.copies(), 0);
    EXPECT_EQ(tracker.moves(), 0);

    // reserve() must not increase the number of initialized objects
    // NOTE: this trace is not scoped and stays active for the remainder of
    // the loop iteration.
    SCOPED_TRACE("reserve");
    v.reserve(len + 1000);
    EXPECT_EQ(tracker.instances(), static_cast<int>(len));
    // Counters were last reset before the shrinking resize(), which did no
    // copies/moves; reserve() relocates each of the `len` elements once.
    EXPECT_EQ(tracker.copies() + tracker.moves(), static_cast<int>(len));

    // pop_back() and erase() must destroy one object
    if (len > 0) {
      tracker.ResetCopiesMovesSwaps();
      v.pop_back();
      EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 1);
      EXPECT_EQ(tracker.copies(), 0);
      EXPECT_EQ(tracker.moves(), 0);

      if (!v.empty()) {
        tracker.ResetCopiesMovesSwaps();
        v.erase(v.begin());
        EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 2);
        // Every element after the erased one shifts down by one slot.
        EXPECT_EQ(tracker.copies() + tracker.moves(),
                  static_cast<int>(len) - 2);
      }
    }

    // An empty erase() range must be a complete no-op.
    tracker.ResetCopiesMovesSwaps();
    int instances_before_empty_erase = tracker.instances();
    v.erase(v.begin(), v.begin());
    EXPECT_EQ(tracker.instances(), instances_before_empty_erase);
    EXPECT_EQ(tracker.copies() + tracker.moves(), 0);
  }
}
1174
// Copy construction must create exactly `len` additional instances, all via
// the copy constructor (never moves), and destroy them all when the copy goes
// out of scope.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnCopyConstruction) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    tracker.ResetCopiesMovesSwaps();

    InstanceVec v;
    for (int i = 0; i < len; i++) {
      v.push_back(Instance(i));
    }
    EXPECT_EQ(tracker.instances(), len);
    EXPECT_GE(tracker.copies() + tracker.moves(),
              len);  // More due to reallocation.
    tracker.ResetCopiesMovesSwaps();
    { // Copy constructor should create 'len' more instances.
      InstanceVec v_copy(v);
      EXPECT_EQ(tracker.instances(), len + len);
      EXPECT_EQ(tracker.copies(), len);
      EXPECT_EQ(tracker.moves(), 0);
    }
    // The copy's elements are destroyed with it, leaving only the originals.
    EXPECT_EQ(tracker.instances(), len);
  }
}
1200
// Move construction must transfer the allocation wholesale when the source is
// heap-allocated, and move (or copy, for copy-only types) element-by-element
// when the source is inline.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveConstruction) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    tracker.ResetCopiesMovesSwaps();

    InstanceVec v;
    const size_t inlined_capacity = v.capacity();
    for (int i = 0; i < len; i++) {
      v.push_back(Instance(i));
    }
    EXPECT_EQ(tracker.instances(), len);
    EXPECT_GE(tracker.copies() + tracker.moves(),
              len);  // More due to reallocation.
    tracker.ResetCopiesMovesSwaps();
    {
      InstanceVec v_copy(std::move(v));
      if (static_cast<size_t>(len) > inlined_capacity) {
        // Allocation is moved as a whole.
        EXPECT_EQ(tracker.instances(), len);
        EXPECT_EQ(tracker.live_instances(), len);
        // Tests an implementation detail, don't rely on this in your code.
        EXPECT_EQ(v.size(), 0u);  // NOLINT misc-use-after-move
        EXPECT_EQ(tracker.copies(), 0);
        EXPECT_EQ(tracker.moves(), 0);
      } else {
        // Inline storage: each element is constructed anew in the target;
        // moved-from sources still exist until the source is destroyed.
        EXPECT_EQ(tracker.instances(), len + len);
        if (Instance::supports_move()) {
          EXPECT_EQ(tracker.live_instances(), len);
          EXPECT_EQ(tracker.copies(), 0);
          EXPECT_EQ(tracker.moves(), len);
        } else {
          // Copy-only types fall back to copying, so all instances stay live.
          EXPECT_EQ(tracker.live_instances(), len + len);
          EXPECT_EQ(tracker.copies(), len);
          EXPECT_EQ(tracker.moves(), 0);
        }
      }
      EXPECT_EQ(tracker.swaps(), 0);  // Move construction never swaps.
    }
  }
}
1244
// Copy assignment between vectors of length `len` and `len + 1`, in both
// directions, checking the resulting instance totals and copy/move counts.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnAssignment) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    for (int longorshort = 0; longorshort <= 1; ++longorshort) {
      SCOPED_TRACE(longorshort);
      tracker.ResetCopiesMovesSwaps();

      InstanceVec longer, shorter;
      for (int i = 0; i < len; i++) {
        longer.push_back(Instance(i));
        shorter.push_back(Instance(i));
      }
      longer.push_back(Instance(len));  // `longer` has one extra element.
      EXPECT_EQ(tracker.instances(), len + len + 1);
      EXPECT_GE(tracker.copies() + tracker.moves(),
                len + len + 1);  // More due to reallocation.

      tracker.ResetCopiesMovesSwaps();
      if (longorshort) {
        // Longer over shorter: the target grows, possibly reallocating.
        shorter = longer;
        EXPECT_EQ(tracker.instances(), (len + 1) + (len + 1));
        EXPECT_GE(tracker.copies() + tracker.moves(),
                  len + 1);  // More due to reallocation.
      } else {
        // Shorter over longer: the surplus element is destroyed and exactly
        // `len` elements are copied/moved in place.
        longer = shorter;
        EXPECT_EQ(tracker.instances(), len + len);
        EXPECT_EQ(tracker.copies() + tracker.moves(), len);
      }
    }
  }
}
1279
// Move assignment in both directions (longer-into-shorter and vice versa):
// a heap-allocated source donates its allocation wholesale; an inline source
// is moved (or copied, for copy-only types) element-by-element.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveAssignment) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    for (int longorshort = 0; longorshort <= 1; ++longorshort) {
      SCOPED_TRACE(longorshort);
      tracker.ResetCopiesMovesSwaps();

      InstanceVec longer, shorter;
      const size_t inlined_capacity = longer.capacity();
      for (int i = 0; i < len; i++) {
        longer.push_back(Instance(i));
        shorter.push_back(Instance(i));
      }
      longer.push_back(Instance(len));
      EXPECT_EQ(tracker.instances(), len + len + 1);
      EXPECT_GE(tracker.copies() + tracker.moves(),
                len + len + 1);  // More due to reallocation.

      tracker.ResetCopiesMovesSwaps();
      int src_len;  // Length of the move source.
      if (longorshort) {
        src_len = len + 1;
        shorter = std::move(longer);
      } else {
        src_len = len;
        longer = std::move(shorter);
      }
      if (static_cast<size_t>(src_len) > inlined_capacity) {
        // Allocation moved as a whole.
        EXPECT_EQ(tracker.instances(), src_len);
        EXPECT_EQ(tracker.live_instances(), src_len);
        EXPECT_EQ(tracker.copies(), 0);
        EXPECT_EQ(tracker.moves(), 0);
      } else {
        // Elements are all copied.
        EXPECT_EQ(tracker.instances(), src_len + src_len);
        if (Instance::supports_move()) {
          EXPECT_EQ(tracker.copies(), 0);
          EXPECT_EQ(tracker.moves(), src_len);
          EXPECT_EQ(tracker.live_instances(), src_len);
        } else {
          EXPECT_EQ(tracker.copies(), src_len);
          EXPECT_EQ(tracker.moves(), 0);
          EXPECT_EQ(tracker.live_instances(), src_len + src_len);
        }
      }
      EXPECT_EQ(tracker.swaps(), 0);  // Move assignment never swaps elements.
    }
  }
}
1333
// assign(n, value) with n within the inline capacity must keep an inline
// vector inline, for every starting size from empty through heap-allocated.
TEST(CountElemAssign, SimpleTypeWithInlineBacking) {
  const size_t inline_cap = absl::InlinedVector<int, 2>().capacity();

  for (size_t start_size = 0; start_size <= 5; ++start_size) {
    SCOPED_TRACE(start_size);
    // Start from `start_size` copies of 12345.
    const std::vector<int> seed(start_size, 12345);
    absl::InlinedVector<int, 2> v(seed.begin(), seed.end());

    v.assign(2, 123);
    EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(123, 123)));
    if (start_size <= inline_cap) {
      // If the original had inline backing, it should stay inline.
      EXPECT_EQ(v.capacity(), inline_cap);
    }
  }
}
1352
// assign(n, value) with n above the inline capacity of two must produce
// exactly the assigned contents, for every starting size.
TEST(CountElemAssign, SimpleTypeWithAllocation) {
  for (size_t start_size = 0; start_size <= 5; ++start_size) {
    SCOPED_TRACE(start_size);
    // Start from `start_size` copies of 12345.
    const std::vector<int> seed(start_size, 12345);
    absl::InlinedVector<int, 2> v(seed.begin(), seed.end());

    v.assign(3, 123);
    EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(123, 123, 123)));
    EXPECT_LE(v.size(), v.capacity());
  }
}
1366
// Instance-typed variant of SimpleTypeWithInlineBacking: a two-element
// assign() over an InlinedVector<Instance, 2> stays inline.
TYPED_TEST_P(InstanceTest, CountElemAssignInlineBacking) {
  using Instance = TypeParam;
  for (size_t start_size = 0; start_size <= 5; ++start_size) {
    SCOPED_TRACE(start_size);
    // Start from `start_size` copies of Instance(12345).
    std::vector<Instance> seed(start_size, Instance(12345));
    absl::InlinedVector<Instance, 2> v(seed.begin(), seed.end());

    v.assign(2, Instance(123));
    EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(ValueIs(123), ValueIs(123))));
    if (start_size <= 2) {
      // If the original had inline backing, it should stay inline.
      EXPECT_EQ(2u, v.capacity());
    }
  }
}
1384
1385 template <typename Instance>
InstanceCountElemAssignWithAllocationTest()1386 void InstanceCountElemAssignWithAllocationTest() {
1387 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1388 SCOPED_TRACE(original_size);
1389 // Original contents are [12345, 12345, ...]
1390 std::vector<Instance> original_contents(original_size, Instance(12345));
1391
1392 absl::InlinedVector<Instance, 2> v(original_contents.begin(),
1393 original_contents.end());
1394 v.assign(3, Instance(123));
1395 EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(ValueIs(123), ValueIs(123),
1396 ValueIs(123))));
1397 EXPECT_LE(v.size(), v.capacity());
1398 }
1399 }
// Runs the allocation-forcing elem-assign test with a copy-only element type.
TEST(CountElemAssign, WithAllocationCopyableInstance) {
  InstanceCountElemAssignWithAllocationTest<CopyableOnlyInstance>();
}
// Runs the allocation-forcing elem-assign test with a movable element type.
TEST(CountElemAssign, WithAllocationCopyableMovableInstance) {
  InstanceCountElemAssignWithAllocationTest<CopyableMovableInstance>();
}
1406
// The iterator-range constructor must stay inline when the range fits the
// inline capacity and heap-allocate when it does not.
TEST(RangedConstructor, SimpleType) {
  std::vector<int> source = {4, 5, 6};

  // Three elements fit within an inline capacity of four.
  absl::InlinedVector<int, 4> inline_v(source.begin(), source.end());
  EXPECT_EQ(3u, inline_v.size());
  // Capacity equal to the inline capacity indicates inline storage.
  EXPECT_EQ(4u, inline_v.capacity());
  EXPECT_EQ(4, inline_v[0]);
  EXPECT_EQ(5, inline_v[1]);
  EXPECT_EQ(6, inline_v[2]);

  // Three elements overflow an inline capacity of two, forcing a
  // re-allocation.
  absl::InlinedVector<int, 2> realloc_v(source.begin(), source.end());
  EXPECT_EQ(3u, realloc_v.size());
  EXPECT_LT(2u, realloc_v.capacity());
  EXPECT_EQ(4, realloc_v[0]);
  EXPECT_EQ(5, realloc_v[1]);
  EXPECT_EQ(6, realloc_v[2]);
}
1426
1427 // Test for ranged constructors using Instance as the element type and
1428 // SourceContainer as the source container type.
1429 template <typename Instance, typename SourceContainer, int inlined_capacity>
InstanceRangedConstructorTestForContainer()1430 void InstanceRangedConstructorTestForContainer() {
1431 InstanceTracker tracker;
1432 SourceContainer source_v = {Instance(0), Instance(1)};
1433 tracker.ResetCopiesMovesSwaps();
1434 absl::InlinedVector<Instance, inlined_capacity> v(source_v.begin(),
1435 source_v.end());
1436 EXPECT_EQ(2u, v.size());
1437 EXPECT_LT(1u, v.capacity());
1438 EXPECT_EQ(0, v[0].value());
1439 EXPECT_EQ(1, v[1].value());
1440 EXPECT_EQ(tracker.copies(), 2);
1441 EXPECT_EQ(tracker.moves(), 0);
1442 }
1443
1444 template <typename Instance, int inlined_capacity>
InstanceRangedConstructorTestWithCapacity()1445 void InstanceRangedConstructorTestWithCapacity() {
1446 // Test with const and non-const, random access and non-random-access sources.
1447 // TODO(bsamwel): Test with an input iterator source.
1448 {
1449 SCOPED_TRACE("std::list");
1450 InstanceRangedConstructorTestForContainer<Instance, std::list<Instance>,
1451 inlined_capacity>();
1452 {
1453 SCOPED_TRACE("const std::list");
1454 InstanceRangedConstructorTestForContainer<
1455 Instance, const std::list<Instance>, inlined_capacity>();
1456 }
1457 {
1458 SCOPED_TRACE("std::vector");
1459 InstanceRangedConstructorTestForContainer<Instance, std::vector<Instance>,
1460 inlined_capacity>();
1461 }
1462 {
1463 SCOPED_TRACE("const std::vector");
1464 InstanceRangedConstructorTestForContainer<
1465 Instance, const std::vector<Instance>, inlined_capacity>();
1466 }
1467 }
1468 }
1469
// Runs the ranged-constructor tests with inline capacities of one and two.
// NOTE: these SCOPED_TRACEs accumulate (neither is scoped), so a failure in
// the capacity=2 run also reports the capacity=1 trace.
TYPED_TEST_P(InstanceTest, RangedConstructor) {
  using Instance = TypeParam;
  SCOPED_TRACE("capacity=1");
  InstanceRangedConstructorTestWithCapacity<Instance, 1>();
  SCOPED_TRACE("capacity=2");
  InstanceRangedConstructorTestWithCapacity<Instance, 2>();
}
1477
// An inline capacity of one forces expansion and re-allocation during the
// ranged construction, verifying that elements placed into freshly allocated
// storage are properly constructed (not merely assigned over raw memory).
TEST(RangedConstructor, ElementsAreConstructed) {
  const std::vector<std::string> source = {"cat", "dog"};

  absl::InlinedVector<std::string, 1> v(source.begin(), source.end());
  EXPECT_EQ("cat", v[0]);
  EXPECT_EQ("dog", v[1]);
}
1487
// Iterator-range assign() over every combination of original size (empty and
// non-empty inline, and heap-allocated) and target size; inline storage must
// be retained whenever both sizes fit inline.
TEST(RangedAssign, SimpleType) {
  const size_t inline_cap = absl::InlinedVector<int, 3>().capacity();

  for (size_t start_size = 0; start_size <= 5; ++start_size) {
    SCOPED_TRACE(start_size);
    // Original contents: `start_size` copies of 12345.
    const std::vector<int> seed(start_size, 12345);

    for (size_t assign_size = 0; assign_size <= 5; ++assign_size) {
      SCOPED_TRACE(assign_size);

      // Replacement contents: [3, 4, ...].
      std::vector<int> replacement;
      for (size_t i = 0; i < assign_size; ++i) {
        replacement.push_back(static_cast<int>(i + 3));
      }

      absl::InlinedVector<int, 3> v(seed.begin(), seed.end());
      v.assign(replacement.begin(), replacement.end());

      EXPECT_EQ(replacement.size(), v.size());
      EXPECT_LE(replacement.size(), v.capacity());
      if (assign_size <= inline_cap && start_size <= inline_cap) {
        // Storage should stay inline when both sizes are small.
        EXPECT_EQ(v.capacity(), inline_cap);
      }
      EXPECT_THAT(v, ElementsAreArray(replacement));
    }
  }
}
1522
1523 // Returns true if lhs and rhs have the same value.
template <typename Instance>
static bool InstanceValuesEqual(const Instance& lhs, const Instance& rhs) {
  // Compare by payload only; the instances' copy/move history is irrelevant.
  const int lhs_value = lhs.value();
  const int rhs_value = rhs.value();
  return lhs_value == rhs_value;
}
1528
1529 // Test for ranged assign() using Instance as the element type and
1530 // SourceContainer as the source container type.
template <typename Instance, typename SourceContainer>
void InstanceRangedAssignTestForContainer() {
  // Test for all combinations of original sizes (empty and non-empty inline,
  // and out of line) and target sizes.
  for (size_t original_size = 0; original_size <= 5; ++original_size) {
    SCOPED_TRACE(original_size);
    // Original contents are [12345, 12345, ...]
    std::vector<Instance> original_contents(original_size, Instance(12345));

    for (size_t target_size = 0; target_size <= 5; ++target_size) {
      SCOPED_TRACE(target_size);

      // New contents are [3, 4, ...]
      // Generate data using a non-const container, because SourceContainer
      // itself may be const.
      // TODO(bsamwel): Test with an input iterator.
      std::vector<Instance> new_contents_in;
      for (size_t i = 0; i < target_size; ++i) {
        new_contents_in.push_back(Instance(static_cast<int>(i) + 3));
      }
      SourceContainer new_contents(new_contents_in.begin(),
                                   new_contents_in.end());

      absl::InlinedVector<Instance, 3> v(original_contents.begin(),
                                         original_contents.end());
      v.assign(new_contents.begin(), new_contents.end());

      EXPECT_EQ(new_contents.size(), v.size());
      EXPECT_LE(new_contents.size(), v.capacity());
      if (target_size <= 3 && original_size <= 3) {
        // Storage should stay inline when target size is small.
        EXPECT_EQ(3u, v.capacity());
      }
      // Compare element-wise by value() via InstanceValuesEqual.
      EXPECT_TRUE(std::equal(v.begin(), v.end(), new_contents.begin(),
                             InstanceValuesEqual<Instance>));
    }
  }
}
1569
// Runs the ranged-assign test against const and non-const, random access and
// non-random-access source containers.
// NOTE: these SCOPED_TRACEs accumulate (none is scoped), so a failure in a
// later case reports all preceding container traces as well.
TYPED_TEST_P(InstanceTest, RangedAssign) {
  using Instance = TypeParam;
  // Test with const and non-const, random access and non-random-access sources.
  // TODO(bsamwel): Test with an input iterator source.
  SCOPED_TRACE("std::list");
  InstanceRangedAssignTestForContainer<Instance, std::list<Instance>>();
  SCOPED_TRACE("const std::list");
  InstanceRangedAssignTestForContainer<Instance, const std::list<Instance>>();
  SCOPED_TRACE("std::vector");
  InstanceRangedAssignTestForContainer<Instance, std::vector<Instance>>();
  SCOPED_TRACE("const std::vector");
  InstanceRangedAssignTestForContainer<Instance, const std::vector<Instance>>();
}
1583
// Three elements fit in an inline capacity of four, so the constructed vector
// keeps its inline capacity.
TEST(InitializerListConstructor, SimpleTypeWithInlineBacking) {
  absl::InlinedVector<int, 4> v{4, 5, 6};
  EXPECT_THAT(v, AllOf(SizeIs(3u), CapacityIs(4u), ElementsAre(4, 5, 6)));
}
1588
// Three elements overflow an inline capacity of two, so the constructor must
// grow the capacity past the inline value.
TEST(InitializerListConstructor, SimpleTypeWithReallocationRequired) {
  absl::InlinedVector<int, 2> v{4, 5, 6};
  EXPECT_THAT(v, AllOf(SizeIs(3u), CapacityIs(Gt(2u)), ElementsAre(4, 5, 6)));
}
1593
// Braced-list elements may need implicit conversion to the vector's value
// type (unsigned literal -> int, char array -> std::string).
TEST(InitializerListConstructor, DisparateTypesInList) {
  absl::InlinedVector<int, 2> ints{-7, 8ULL};
  EXPECT_THAT(ints, ElementsAre(-7, 8));

  absl::InlinedVector<std::string, 2> strings{"foo", std::string("bar")};
  EXPECT_THAT(strings, ElementsAre("foo", "bar"));
}
1600
// A single non-trivial element fits inline, so capacity stays at the inline
// capacity (queried from a default-constructed vector of the same type).
TEST(InitializerListConstructor, ComplexTypeWithInlineBacking) {
  const size_t inline_cap =
      absl::InlinedVector<CopyableMovableInstance, 1>().capacity();
  absl::InlinedVector<CopyableMovableInstance, 1> v{CopyableMovableInstance(0)};
  EXPECT_THAT(v,
              AllOf(SizeIs(1u), CapacityIs(inline_cap), ElementsAre(ValueIs(0))));
}
1609
// Two non-trivial elements overflow an inline capacity of one, forcing the
// constructor to allocate.
TEST(InitializerListConstructor, ComplexTypeWithReallocationRequired) {
  absl::InlinedVector<CopyableMovableInstance, 1> v{CopyableMovableInstance(0),
                                                    CopyableMovableInstance(1)};
  EXPECT_THAT(v, AllOf(SizeIs(2u), CapacityIs(Gt(1u)),
                       ElementsAre(ValueIs(0), ValueIs(1))));
}
1616
// A one-element initializer list fits inline, so neither assign() nor
// operator= may change the capacity, whatever the starting size.
TEST(InitializerListAssign, SimpleTypeFitsInlineBacking) {
  for (size_t start_size = 0; start_size <= 4; ++start_size) {
    SCOPED_TRACE(start_size);

    // assign({...}) path.
    absl::InlinedVector<int, 2> assigned(start_size, 12345);
    const size_t assigned_cap = assigned.capacity();
    assigned.assign({3});
    EXPECT_THAT(assigned,
                AllOf(SizeIs(1u), CapacityIs(assigned_cap), ElementsAre(3)));

    // operator={...} path must behave identically.
    absl::InlinedVector<int, 2> reassigned(start_size, 12345);
    const size_t reassigned_cap = reassigned.capacity();
    reassigned = {3};
    EXPECT_THAT(reassigned, AllOf(SizeIs(1u), CapacityIs(reassigned_cap),
                                  ElementsAre(3)));
  }
}
1634
// A three-element initializer list overflows the inline capacity of two;
// both assign() and operator= must grow the vector accordingly.
TEST(InitializerListAssign, SimpleTypeDoesNotFitInlineBacking) {
  for (size_t start_size = 0; start_size <= 4; ++start_size) {
    SCOPED_TRACE(start_size);

    absl::InlinedVector<int, 2> assigned(start_size, 12345);
    assigned.assign({3, 4, 5});
    EXPECT_THAT(assigned, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
    EXPECT_LE(3u, assigned.capacity());

    absl::InlinedVector<int, 2> reassigned(start_size, 12345);
    reassigned = {3, 4, 5};
    EXPECT_THAT(reassigned, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
    EXPECT_LE(3u, reassigned.capacity());
  }
}
1649
// Initializer-list elements may require implicit conversion to the value
// type, through both assign() and operator=.
TEST(InitializerListAssign, DisparateTypesInList) {
  absl::InlinedVector<int, 2> ints_assigned;
  ints_assigned.assign({-7, 8ULL});
  EXPECT_THAT(ints_assigned, ElementsAre(-7, 8));

  absl::InlinedVector<int, 2> ints_reassigned;
  ints_reassigned = {-7, 8ULL};
  EXPECT_THAT(ints_reassigned, ElementsAre(-7, 8));

  absl::InlinedVector<std::string, 2> strings_assigned;
  strings_assigned.assign({"foo", std::string("bar")});
  EXPECT_THAT(strings_assigned, ElementsAre("foo", "bar"));

  absl::InlinedVector<std::string, 2> strings_reassigned;
  strings_reassigned = {"foo", std::string("bar")};
  EXPECT_THAT(strings_reassigned, ElementsAre("foo", "bar"));
}
1667
// Instance-typed initializer-list assign: a one-element list preserves
// capacity; a three-element list overflows the inline capacity of two.
TYPED_TEST_P(InstanceTest, InitializerListAssign) {
  using Instance = TypeParam;
  for (size_t start_size = 0; start_size <= 4; ++start_size) {
    SCOPED_TRACE(start_size);
    absl::InlinedVector<Instance, 2> v(start_size, Instance(12345));
    const size_t cap_before = v.capacity();
    v.assign({Instance(3)});
    EXPECT_THAT(
        v, AllOf(SizeIs(1u), CapacityIs(cap_before), ElementsAre(ValueIs(3))));
  }
  for (size_t start_size = 0; start_size <= 4; ++start_size) {
    SCOPED_TRACE(start_size);
    absl::InlinedVector<Instance, 2> v(start_size, Instance(12345));
    v.assign({Instance(3), Instance(4), Instance(5)});
    EXPECT_THAT(
        v, AllOf(SizeIs(3u), ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5))));
    EXPECT_LE(3u, v.capacity());
  }
}
1687
// Registers every TYPED_TEST_P defined above so the suite can be instantiated.
REGISTER_TYPED_TEST_SUITE_P(InstanceTest, Swap, CountConstructorsDestructors,
                            CountConstructorsDestructorsOnCopyConstruction,
                            CountConstructorsDestructorsOnMoveConstruction,
                            CountConstructorsDestructorsOnAssignment,
                            CountConstructorsDestructorsOnMoveAssignment,
                            CountElemAssignInlineBacking, RangedConstructor,
                            RangedAssign, InitializerListAssign);

// Runs the InstanceTest suite once with a copy-only element type and once
// with a copyable-and-movable element type.
using InstanceTypes =
    ::testing::Types<CopyableOnlyInstance, CopyableMovableInstance>;
INSTANTIATE_TYPED_TEST_SUITE_P(InstanceTestOnTypes, InstanceTest,
                               InstanceTypes);
1700
// Smoke test: an InlinedVector of the project-defined `Dynamic` type
// (declared elsewhere in this file) can be default-constructed.
TEST(DynamicVec, DynamicVecCompiles) {
  DynamicVec v;
  (void)v;  // Silence unused-variable warnings; construction is the test.
}
1705
// A non-empty DynamicVec can be constructed from a size argument.
TEST(DynamicVec, CreateNonEmptyDynamicVec) {
  DynamicVec v(1);
  EXPECT_EQ(v.size(), 1u);
}
1710
// emplace_back() works on a freshly constructed (inline-storage) DynamicVec.
TEST(DynamicVec, EmplaceBack) {
  DynamicVec v;
  v.emplace_back(Dynamic{});
  EXPECT_EQ(v.size(), 1u);
}
1716
// emplace_back() also works after reserve() — intended, per the test name,
// to push the vector onto heap-allocated storage first.
TEST(DynamicVec, EmplaceBackAfterHeapAllocation) {
  DynamicVec v;
  v.reserve(10);
  v.emplace_back(Dynamic{});
  EXPECT_EQ(v.size(), 1u);
}
1723
// begin()/end() and cbegin()/cend() compare equal on an empty vector.
TEST(DynamicVec, EmptyIteratorComparison) {
  DynamicVec v;
  EXPECT_EQ(v.begin(), v.end());
  EXPECT_EQ(v.cbegin(), v.cend());
}
1729
// Verifies that every constructor overload accepts a custom allocator
// argument; each temporary is scoped so its storage is released immediately.
TEST(AllocatorSupportTest, Constructors) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
  int64_t allocated = 0;
  MyAlloc alloc(&allocated);
  { AllocVec ABSL_ATTRIBUTE_UNUSED v; }                  // Default.
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(alloc); }           // Allocator-only.
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc); }
  { AllocVec ABSL_ATTRIBUTE_UNUSED v({1, 2, 3}, alloc); }  // Init list.

  AllocVec v2;
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(v2, alloc); }             // Copy + alloc.
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(std::move(v2), alloc); }  // Move + alloc.
}
1745
// Verifies CountingAllocator byte accounting across construction, copy, move,
// resize, and shrink_to_fit: inline storage allocates nothing, heap storage
// is accounted exactly, and all bytes are returned as each scope exits.
TEST(AllocatorSupportTest, CountAllocations) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
  int64_t allocated = 0;
  MyAlloc alloc(&allocated);
  {
    // Four elements fit inline: no allocation at all.
    AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + 4, alloc);
    EXPECT_THAT(allocated, Eq(0));
  }
  EXPECT_THAT(allocated, Eq(0));
  {
    // Eight elements overflow the inline capacity of four.
    AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc);
    EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));
  }
  EXPECT_THAT(allocated, Eq(0));
  {
    // Copy and extended-move of an inline vector allocate nothing anywhere.
    AllocVec v(4, 1, alloc);
    EXPECT_THAT(allocated, Eq(0));

    int64_t allocated2 = 0;
    MyAlloc alloc2(&allocated2);
    AllocVec v2(v, alloc2);
    EXPECT_THAT(allocated2, Eq(0));

    int64_t allocated3 = 0;
    MyAlloc alloc3(&allocated3);
    AllocVec v3(std::move(v), alloc3);
    EXPECT_THAT(allocated3, Eq(0));
  }
  EXPECT_THAT(allocated, 0);
  {
    // Copy and extended-move of a heap-backed vector each charge the target
    // vector's own allocator.
    AllocVec v(8, 2, alloc);
    EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));

    int64_t allocated2 = 0;
    MyAlloc alloc2(&allocated2);
    AllocVec v2(v, alloc2);
    EXPECT_THAT(allocated2, Eq(static_cast<int64_t>(v2.size() * sizeof(int))));

    int64_t allocated3 = 0;
    MyAlloc alloc3(&allocated3);
    AllocVec v3(std::move(v), alloc3);
    EXPECT_THAT(allocated3, Eq(static_cast<int64_t>(v3.size() * sizeof(int))));
  }
  EXPECT_EQ(allocated, 0);
  {
    // Test shrink_to_fit deallocations.
    AllocVec v(8, 2, alloc);
    EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
    v.resize(5);  // resize() alone never releases capacity.
    EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
    v.shrink_to_fit();
    EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
    v.resize(4);
    EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
    // Four elements fit inline again, so the heap block is freed entirely.
    v.shrink_to_fit();
    EXPECT_EQ(allocated, 0);
  }
}
1806
// Swapping two heap-backed vectors exchanges their allocations: afterwards
// each allocator's outstanding byte count matches the capacity of the vector
// it now backs, and everything is returned when the vectors are destroyed.
TEST(AllocatorSupportTest, SwapBothAllocated) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  int64_t allocated1 = 0;
  int64_t allocated2 = 0;
  {
    // Both arrays exceed the inline capacity of four, so both vectors
    // allocate, with different capacities.
    const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
    const int ia2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    MyAlloc a1(&allocated1);
    MyAlloc a2(&allocated2);
    AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
    AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
    EXPECT_LT(v1.capacity(), v2.capacity());
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    v1.swap(v2);
    EXPECT_THAT(v1, ElementsAreArray(ia2));
    EXPECT_THAT(v2, ElementsAreArray(ia1));
    // After the swap, allocator 1's bytes back v2's storage and vice versa.
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
  }
  EXPECT_THAT(allocated1, 0);
  EXPECT_THAT(allocated2, 0);
}
1835
// Swapping a heap-backed vector with an inline one: the allocation (and its
// allocator) follows the elements, while the inline side allocates nothing.
TEST(AllocatorSupportTest, SwapOneAllocated) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  int64_t allocated1 = 0;
  int64_t allocated2 = 0;
  {
    const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};  // Overflows inline capacity.
    const int ia2[] = {0, 1, 2, 3};              // Fits inline.
    MyAlloc a1(&allocated1);
    MyAlloc a2(&allocated2);
    AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
    AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2, Eq(0));
    v1.swap(v2);
    EXPECT_THAT(v1, ElementsAreArray(ia2));
    EXPECT_THAT(v2, ElementsAreArray(ia1));
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2, Eq(0));
    // The allocators travel with the storage they manage.
    EXPECT_TRUE(v2.get_allocator() == a1);
    EXPECT_TRUE(v1.get_allocator() == a2);
  }
  EXPECT_THAT(allocated1, 0);
  EXPECT_THAT(allocated2, 0);
}
1863
// With std::scoped_allocator_adaptor, allocations made by the inner
// std::vector elements must be charged to the same counting allocator as the
// InlinedVector itself -- here while the InlinedVector stays inline.
TEST(AllocatorSupportTest, ScopedAllocatorWorksInlined) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  int64_t total_allocated_byte_count = 0;

  AllocVec inlined_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Called only once to remain inlined
  inlined_case.emplace_back();

  int64_t absl_responsible_for_count = total_allocated_byte_count;

  // MSVC's allocator preemptively allocates in debug mode
#if !defined(_MSC_VER)
  EXPECT_EQ(absl_responsible_for_count, 0);
#endif  // !defined(_MSC_VER)

  // Growth of the *inner* vector must be visible through the propagated
  // scoped allocator's shared byte counter.
  inlined_case[0].emplace_back();
  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);

  // Releasing everything must return the counter to zero.
  inlined_case.clear();
  inlined_case.shrink_to_fit();
  EXPECT_EQ(total_allocated_byte_count, 0);
}
1891
// Same scoped-allocator propagation check as above, but with the outer
// InlinedVector forced onto the heap.
TEST(AllocatorSupportTest, ScopedAllocatorWorksAllocated) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  // Every allocation and deallocation in this test is tallied here.
  int64_t total_allocated_byte_count = 0;

  AllocVec vec(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Two emplacements exceed the inline capacity of 1, pushing the outer
  // vector's storage onto the heap.
  vec.emplace_back();
  vec.emplace_back();

  // Bytes attributable to the InlinedVector itself must now be nonzero.
  const int64_t bytes_from_outer_vector = total_allocated_byte_count;
  EXPECT_THAT(bytes_from_outer_vector, Gt(0));

  // Growing an inner vector must route through the propagated counting
  // allocator and raise the shared tally further.
  vec[1].emplace_back();
  EXPECT_THAT(total_allocated_byte_count, Gt(bytes_from_outer_vector));

  // Releasing all storage drives the tally back to zero: each allocation
  // was matched by a deallocation through the same counter.
  vec.clear();
  vec.shrink_to_fit();
  EXPECT_THAT(total_allocated_byte_count, Eq(0));
}
1916
// Exercises the (size, allocator) constructor below and above the inline
// capacity threshold.
TEST(AllocatorSupportTest, SizeAllocConstructor) {
  constexpr size_t inlined_size = 4;
  using Alloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, inlined_size, Alloc>;

  {
    // Half the inline capacity: storage stays inline and the allocator is
    // never invoked; all elements are value-initialized to zero.
    const size_t element_count = inlined_size / 2;
    int64_t allocated = 0;
    AllocVec v(element_count, Alloc(&allocated));
    EXPECT_THAT(allocated, Eq(0));
    EXPECT_THAT(v, AllOf(SizeIs(element_count), Each(0)));
  }

  {
    // Twice the inline capacity: storage must come from the allocator, which
    // records exactly element_count * sizeof(int) bytes.
    const size_t element_count = inlined_size * 2;
    int64_t allocated = 0;
    AllocVec v(element_count, Alloc(&allocated));
    EXPECT_THAT(allocated,
                Eq(static_cast<int64_t>(element_count * sizeof(int))));
    EXPECT_THAT(v, AllOf(SizeIs(element_count), Each(0)));
  }
}
1942
// An allocator that supplies only the bare minimum the allocator concept
// requires — value_type, allocate, deallocate. InlinedVector must synthesize
// everything else through allocator_traits for this to compile and run.
TEST(InlinedVectorTest, MinimumAllocatorCompilesUsingTraits) {
  using T = int;
  using Backing = std::allocator<T>;
  using BackingTraits = absl::allocator_traits<Backing>;

  struct MinimumAllocator {
    using value_type = T;

    // Forward to a real allocator so memory actually comes from somewhere.
    value_type* allocate(size_t n) {
      Backing backing;
      return BackingTraits::allocate(backing, n);
    }

    void deallocate(value_type* p, size_t n) {
      Backing backing;
      BackingTraits::deallocate(backing, p, n);
    }
  };

  // Exercise both growth and shrink paths through the traits machinery.
  absl::InlinedVector<T, 1, MinimumAllocator> vec;
  vec.emplace_back();
  vec.resize(0);
}
1966
// Verifies that absl::Hash treats equal InlinedVectors as equal regardless
// of whether their elements live inline or on the heap.
TEST(InlinedVectorTest, AbslHashValueWorks) {
  using V = absl::InlinedVector<int, 4>;
  std::vector<V> cases;

  // Generate a variety of vectors. Some of these are small enough for the
  // inline space but are stored out of line: resize() below can shrink a
  // grown vector back under the inline capacity.
  for (size_t i = 0; i < 10; ++i) {
    V v;
    for (int j = 0; j < static_cast<int>(i); ++j) {
      v.push_back(j);
    }
    cases.push_back(v);
    v.resize(i % 4);
    cases.push_back(v);
  }

  EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(cases));
}
1985
// A counted instance type that is move-constructible but not move-assignable.
// Declaring the move operations also suppresses the implicitly generated copy
// operations, so InlinedVector's move assignment must never rely on
// element-wise assignment (or copies) for this type.
class MoveConstructibleOnlyInstance
    : public absl::test_internal::BaseCountedInstance {
 public:
  explicit MoveConstructibleOnlyInstance(int x) : BaseCountedInstance(x) {}
  MoveConstructibleOnlyInstance(MoveConstructibleOnlyInstance&& other) =
      default;
  MoveConstructibleOnlyInstance& operator=(
      MoveConstructibleOnlyInstance&& other) = delete;
};
1995
// Pairwise matcher for use with testing::Pointwise: matches when the actual
// element's value() equals the expected int, e.g.
// EXPECT_THAT(vec, Pointwise(HasValue(), {1, 2, 3})).
MATCHER(HasValue, "") {
  return ::testing::get<0>(arg).value() == ::testing::get<1>(arg);
}
1999
// Move-assigning a heap-backed vector into an inline one: the destination
// takes over the source's heap buffer, so no element is moved.
TEST(NonAssignableMoveAssignmentTest, AllocatedToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  for (int value : {1, 2, 3}) allocated.emplace_back(value);
  tracker.ResetCopiesMovesSwaps();

  inlined = std::move(allocated);
  // Ownership of the heap buffer transferred wholesale: zero element moves,
  // and all three source instances remain alive.
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
}
2018
// Move-assigning an inline vector into a heap-backed one: inline storage
// cannot change hands, so the element itself must be move-constructed.
TEST(NonAssignableMoveAssignmentTest, InlineToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  for (int value : {1, 2, 3}) allocated.emplace_back(value);
  tracker.ResetCopiesMovesSwaps();

  allocated = std::move(inlined);
  // The single source element is move-constructed across; the destination's
  // previous contents are destroyed, leaving one live instance.
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(allocated, Pointwise(HasValue(), {1}));
}
2037
// Move-assigning between two inline vectors: neither side owns a heap
// buffer, so the element must be move-constructed into the destination.
TEST(NonAssignableMoveAssignmentTest, InlineToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined_a;
  inlined_a.emplace_back(1);
  absl::InlinedVector<X, 2> inlined_b;
  inlined_b.emplace_back(1);
  tracker.ResetCopiesMovesSwaps();

  inlined_a = std::move(inlined_b);
  // One element moved across; the destination's old element was destroyed.
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(inlined_a, Pointwise(HasValue(), {1}));
}
2054
// Move-assigning between two heap-backed vectors: the destination adopts the
// source's buffer outright, so no per-element work is required.
TEST(NonAssignableMoveAssignmentTest, AllocatedToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> allocated_a;
  allocated_a.emplace_back(1);
  allocated_a.emplace_back(2);
  allocated_a.emplace_back(3);
  absl::InlinedVector<X, 2> allocated_b;
  allocated_b.emplace_back(4);
  allocated_b.emplace_back(5);
  allocated_b.emplace_back(6);
  allocated_b.emplace_back(7);
  tracker.ResetCopiesMovesSwaps();

  allocated_a = std::move(allocated_b);
  // Passed ownership of the allocated storage: zero element moves, and the
  // four source instances stay alive (a's three were destroyed).
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 4);

  EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
}
2076
// Self-move-assignment must be a no-op: no element is moved or destroyed and
// the contents are preserved.
TEST(NonAssignableMoveAssignmentTest, AssignThis) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> v;
  v.emplace_back(1);
  v.emplace_back(2);
  v.emplace_back(3);

  tracker.ResetCopiesMovesSwaps();

  // Obfuscated in order to pass -Wself-move.
  v = std::move(*std::addressof(v));
  // Nothing happens: the implementation detects self-assignment.
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
}
2095
// A counted instance type whose free swap() is explicitly deleted. Copy and
// move special members remain available, so InlinedVector::swap must fall
// back to moves instead of calling swap on individual elements.
class NonSwappableInstance : public absl::test_internal::BaseCountedInstance {
 public:
  explicit NonSwappableInstance(int x) : BaseCountedInstance(x) {}
  NonSwappableInstance(const NonSwappableInstance& other) = default;
  NonSwappableInstance& operator=(const NonSwappableInstance& other) = default;
  NonSwappableInstance(NonSwappableInstance&& other) = default;
  NonSwappableInstance& operator=(NonSwappableInstance&& other) = default;
};

// Deleting the ADL-visible swap makes `swap(a, b)` ill-formed for this type.
void swap(NonSwappableInstance&, NonSwappableInstance&) = delete;
2106
// Swapping an inline vector with a heap-backed one when elements cannot be
// swapped: the heap buffer changes owner, and only the inline element moves.
TEST(NonSwappableSwapTest, InlineAndAllocatedTransferStorageAndMove) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  for (int value : {1, 2, 3}) allocated.emplace_back(value);
  tracker.ResetCopiesMovesSwaps();

  inlined.swap(allocated);
  // One move for the single inline element; all four instances survive the
  // exchange.
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 4);

  EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
}
2124
// Swapping two inline vectors whose element type has no usable swap: the
// implementation must exchange elements via moves instead.
TEST(NonSwappableSwapTest, InlineAndInlineMoveIndividualElements) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined_a;
  inlined_a.emplace_back(1);
  absl::InlinedVector<X, 2> inlined_b;
  inlined_b.emplace_back(2);
  tracker.ResetCopiesMovesSwaps();

  inlined_a.swap(inlined_b);
  // Three moves, consistent with exchanging one element pair through a
  // temporary (a->tmp, b->a, tmp->b).
  EXPECT_EQ(tracker.moves(), 3);
  EXPECT_EQ(tracker.live_instances(), 2);

  EXPECT_THAT(inlined_a, Pointwise(HasValue(), {2}));
  EXPECT_THAT(inlined_b, Pointwise(HasValue(), {1}));
}
2141
// Swapping two heap-backed vectors: a pure buffer exchange, so element
// swappability is irrelevant and no moves occur.
TEST(NonSwappableSwapTest, AllocatedAndAllocatedOnlyTransferStorage) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> allocated_a;
  for (int value : {1, 2, 3}) allocated_a.emplace_back(value);
  absl::InlinedVector<X, 2> allocated_b;
  for (int value : {4, 5, 6, 7}) allocated_b.emplace_back(value);
  tracker.ResetCopiesMovesSwaps();

  allocated_a.swap(allocated_b);
  // Pointer exchange only: zero element moves, all seven instances live.
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 7);

  EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
  EXPECT_THAT(allocated_b, Pointwise(HasValue(), {1, 2, 3}));
}
2163
// Self-swap must be a no-op: no moves, no destroyed instances, contents
// unchanged.
TEST(NonSwappableSwapTest, SwapThis) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> v;
  v.emplace_back(1);
  v.emplace_back(2);
  v.emplace_back(3);

  tracker.ResetCopiesMovesSwaps();

  v.swap(v);
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
}
2180
// Convenience alias for a char InlinedVector with inline capacity N.
template <size_t N>
using CharVec = absl::InlinedVector<char, N>;

// Warning: This struct "simulates" the type `InlinedVector::Storage::Allocated`
// to make reasonable expectations for inlined storage capacity optimization. If
// implementation changes `Allocated`, then `MySpan` and tests that use it need
// to be updated accordingly.
template <typename T>
struct MySpan {
  T* data;     // heap pointer in the real Allocated representation
  size_t size; // allocated capacity in the real Allocated representation
};
2193
// The inline buffer shares space with the out-of-line representation, so a
// tiny requested capacity is rounded up to fill that footprint for free.
TEST(StorageTest, InlinedCapacityAutoIncrease) {
  // For char elements the requested capacity of 1 is auto increased to
  // sizeof(MySpan<char>), and capacities 1 and 2 land on the same size.
  constexpr size_t kExpectedCharCapacity = sizeof(MySpan<char>);
  EXPECT_GT(CharVec<1>().capacity(), 1);
  EXPECT_EQ(CharVec<1>().capacity(), kExpectedCharCapacity);
  EXPECT_EQ(CharVec<1>().capacity(), CharVec<2>().capacity());
  EXPECT_EQ(sizeof(CharVec<1>), sizeof(CharVec<2>));

  // For int elements the same rounding applies, scaled by the element size:
  // sizeof(MySpan<int>) / sizeof(int).
  constexpr size_t kExpectedIntCapacity = sizeof(MySpan<int>) / sizeof(int);
  EXPECT_GT((absl::InlinedVector<int, 1>().capacity()), 1);
  EXPECT_EQ((absl::InlinedVector<int, 1>().capacity()), kExpectedIntCapacity);
}
2207
2208 } // anonymous namespace
2209