inlined_vector_test.cc (71729B)
1 // Copyright 2019 The Abseil Authors. 2 // 3 // Licensed under the Apache License, Version 2.0 (the "License"); 4 // you may not use this file except in compliance with the License. 5 // You may obtain a copy of the License at 6 // 7 // https://www.apache.org/licenses/LICENSE-2.0 8 // 9 // Unless required by applicable law or agreed to in writing, software 10 // distributed under the License is distributed on an "AS IS" BASIS, 11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 // See the License for the specific language governing permissions and 13 // limitations under the License. 14 15 #include "absl/container/inlined_vector.h" 16 17 #include <algorithm> 18 #include <cstddef> 19 #include <forward_list> 20 #include <iterator> 21 #include <list> 22 #include <memory> 23 #include <scoped_allocator> 24 #include <sstream> 25 #include <stdexcept> 26 #include <string> 27 #include <utility> 28 #include <vector> 29 30 #include "gmock/gmock.h" 31 #include "gtest/gtest.h" 32 #include "absl/base/attributes.h" 33 #include "absl/base/internal/exception_testing.h" 34 #include "absl/base/internal/iterator_traits_test_helper.h" 35 #include "absl/base/macros.h" 36 #include "absl/base/options.h" 37 #include "absl/container/internal/test_allocator.h" 38 #include "absl/container/internal/test_instance_tracker.h" 39 #include "absl/hash/hash_testing.h" 40 #include "absl/log/check.h" 41 #include "absl/memory/memory.h" 42 #include "absl/strings/str_cat.h" 43 44 namespace { 45 46 using absl::container_internal::CountingAllocator; 47 using absl::test_internal::CopyableMovableInstance; 48 using absl::test_internal::CopyableOnlyInstance; 49 using absl::test_internal::InstanceTracker; 50 using testing::AllOf; 51 using testing::Each; 52 using testing::ElementsAre; 53 using testing::ElementsAreArray; 54 using testing::Eq; 55 using testing::Gt; 56 using testing::Pointee; 57 using testing::Pointwise; 58 using testing::PrintToString; 59 using testing::SizeIs; 60 61 
// Primary vector type under test: ints with an inlined capacity of 8.
using IntVec = absl::InlinedVector<int, 8>;

// Matches a container whose capacity() satisfies matcher `n`.
MATCHER_P(CapacityIs, n, "") {
  return testing::ExplainMatchResult(n, arg.capacity(), result_listener);
}

// Matches an instance whose value() satisfies matcher `e`.
MATCHER_P(ValueIs, e, "") {
  return testing::ExplainMatchResult(e, arg.value(), result_listener);
}

// TODO(bsamwel): Add support for movable-only types.

// Test fixture for typed tests on BaseCountedInstance derived classes, see
// test_instance_tracker.h.
template <typename T>
class InstanceTest : public ::testing::Test {};
TYPED_TEST_SUITE_P(InstanceTest);

// A simple reference counted class to make sure that the proper elements are
// destroyed in the erase(begin, end) test.
class RefCounted {
 public:
  // Each live copy increments *count; each destruction decrements it.
  RefCounted(int value, int* count) : value_(value), count_(count) { Ref(); }

  RefCounted(const RefCounted& v) : value_(v.value_), count_(v.count_) {
    Ref();
  }

  ~RefCounted() {
    Unref();
    count_ = nullptr;  // Poison the pointer so use-after-destroy is loud.
  }

  friend void swap(RefCounted& a, RefCounted& b) {
    using std::swap;
    swap(a.value_, b.value_);
    swap(a.count_, b.count_);
  }

  // Copy-and-swap assignment: `v` is a by-value copy, so counts stay balanced.
  RefCounted& operator=(RefCounted v) {
    using std::swap;
    swap(*this, v);
    return *this;
  }

  void Ref() const {
    CHECK_NE(count_, nullptr);
    ++(*count_);
  }

  void Unref() const {
    --(*count_);
    CHECK_GE(*count_, 0);  // A negative count means a double-destroy.
  }

  int value_;
  int* count_;
};

using RefCountedVec = absl::InlinedVector<RefCounted, 8>;

// A class with a vtable pointer
class Dynamic {
 public:
  virtual ~Dynamic() {}
};

using DynamicVec = absl::InlinedVector<Dynamic, 8>;

// Append 0..len-1 to *v
template <typename Container>
static void Fill(Container* v, size_t len, int offset = 0) {
  for (size_t i = 0; i < len; i++) {
    v->push_back(static_cast<int>(i) + offset);
  }
}

// Convenience overload: returns a freshly filled IntVec by value.
static IntVec Fill(size_t len, int offset = 0) {
  IntVec v;
  Fill(&v, len, offset);
  return v;
}
TEST(IntVec, SimpleOps) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v;
    const IntVec& cv = v;  // const alias

    Fill(&v, len);
    EXPECT_EQ(len, v.size());
    EXPECT_LE(len, v.capacity());

    // Element access via operator[] and at() agree with the fill pattern.
    for (size_t i = 0; i < len; i++) {
      EXPECT_EQ(static_cast<int>(i), v[i]);
      EXPECT_EQ(static_cast<int>(i), v.at(i));
    }
    EXPECT_EQ(v.begin(), v.data());
    EXPECT_EQ(cv.begin(), cv.data());

    // Mutable iteration must visit every element in order.
    size_t counter = 0;
    for (IntVec::iterator iter = v.begin(); iter != v.end(); ++iter) {
      EXPECT_EQ(static_cast<int>(counter), *iter);
      counter++;
    }
    EXPECT_EQ(counter, len);

    // const_iterator obtained by conversion from begin()/end().
    counter = 0;
    for (IntVec::const_iterator iter = v.begin(); iter != v.end(); ++iter) {
      EXPECT_EQ(static_cast<int>(counter), *iter);
      counter++;
    }
    EXPECT_EQ(counter, len);

    // const_iterator obtained explicitly via cbegin()/cend().
    counter = 0;
    for (IntVec::const_iterator iter = v.cbegin(); iter != v.cend(); ++iter) {
      EXPECT_EQ(static_cast<int>(counter), *iter);
      counter++;
    }
    EXPECT_EQ(counter, len);

    if (len > 0) {
      EXPECT_EQ(0, v.front());
      EXPECT_EQ(static_cast<int>(len - 1), v.back());
      v.pop_back();
      EXPECT_EQ(len - 1, v.size());
      for (size_t i = 0; i < v.size(); ++i) {
        EXPECT_EQ(static_cast<int>(i), v[i]);
        EXPECT_EQ(static_cast<int>(i), v.at(i));
      }
    }
  }
}

TEST(IntVec, PopBackNoOverflow) {
  IntVec v = {1};
  v.pop_back();
  EXPECT_EQ(v.size(), 0u);
}

TEST(IntVec, AtThrows) {
  IntVec v = {1, 2, 3};
  EXPECT_EQ(v.at(2), 3);
  // at() past the end must throw (or abort, when exceptions are disabled).
  ABSL_BASE_INTERNAL_EXPECT_FAIL(v.at(3), std::out_of_range,
                                 "failed bounds check");
}

TEST(IntVec, ReverseIterator) {
  for (size_t len = 0; len < 20; len++) {
    IntVec v;
    Fill(&v, len);

    // reverse_iterator walks len-1 .. 0.
    size_t counter = len;
    for (IntVec::reverse_iterator iter = v.rbegin(); iter != v.rend(); ++iter) {
      counter--;
      EXPECT_EQ(static_cast<int>(counter), *iter);
    }
    EXPECT_EQ(counter, 0u);

    // const_reverse_iterator obtained by conversion from rbegin()/rend().
    counter = len;
    for (IntVec::const_reverse_iterator iter = v.rbegin(); iter != v.rend();
         ++iter) {
      counter--;
      EXPECT_EQ(static_cast<int>(counter), *iter);
    }
    EXPECT_EQ(counter, 0u);

    // const_reverse_iterator obtained explicitly via crbegin()/crend().
    counter = len;
    for (IntVec::const_reverse_iterator iter = v.crbegin(); iter != v.crend();
         ++iter) {
      counter--;
      EXPECT_EQ(static_cast<int>(counter), *iter);
    }
    EXPECT_EQ(counter, 0u);
  }
}

TEST(IntVec, Erase) {
  // Erase each single position of each length; prefix keeps its values and
  // the suffix shifts down by one.
  for (size_t len = 1; len < 20; len++) {
    for (size_t i = 0; i < len; ++i) {
      IntVec v;
      Fill(&v, len);
      v.erase(v.begin() + i);
      EXPECT_EQ(len - 1, v.size());
      for (size_t j = 0; j < i; ++j) {
        EXPECT_EQ(static_cast<int>(j), v[j]);
      }
      for (size_t j = i; j < len - 1; ++j) {
        EXPECT_EQ(static_cast<int>(j + 1), v[j]);
      }
    }
  }
}

TEST(IntVec, Hardened) {
  IntVec v;
  Fill(&v, 10);
  EXPECT_EQ(v[9], 9);
// Out-of-bounds operator[] and oversized resize() must die in debug or
// hardened builds.
#if !defined(NDEBUG) || ABSL_OPTION_HARDENED
  EXPECT_DEATH_IF_SUPPORTED(v[10], "");
  EXPECT_DEATH_IF_SUPPORTED(v[static_cast<size_t>(-1)], "");
  EXPECT_DEATH_IF_SUPPORTED(v.resize(v.max_size() + 1), "");
#endif
}

// Move construction of a container of unique pointers should work fine, with no
// leaks, despite the fact that unique pointers are trivially relocatable but
// not trivially destructible.
268 TEST(UniquePtr, MoveConstruct) { 269 for (size_t size = 0; size < 16; ++size) { 270 SCOPED_TRACE(size); 271 272 absl::InlinedVector<std::unique_ptr<size_t>, 2> a; 273 for (size_t i = 0; i < size; ++i) { 274 a.push_back(std::make_unique<size_t>(i)); 275 } 276 277 absl::InlinedVector<std::unique_ptr<size_t>, 2> b(std::move(a)); 278 279 ASSERT_THAT(b, SizeIs(size)); 280 for (size_t i = 0; i < size; ++i) { 281 ASSERT_THAT(b[i], Pointee(i)); 282 } 283 } 284 } 285 286 // Move assignment of a container of unique pointers should work fine, with no 287 // leaks, despite the fact that unique pointers are trivially relocatable but 288 // not trivially destructible. 289 TEST(UniquePtr, MoveAssign) { 290 for (size_t size = 0; size < 16; ++size) { 291 SCOPED_TRACE(size); 292 293 absl::InlinedVector<std::unique_ptr<size_t>, 2> a; 294 for (size_t i = 0; i < size; ++i) { 295 a.push_back(std::make_unique<size_t>(i)); 296 } 297 298 absl::InlinedVector<std::unique_ptr<size_t>, 2> b; 299 b = std::move(a); 300 301 ASSERT_THAT(b, SizeIs(size)); 302 for (size_t i = 0; i < size; ++i) { 303 ASSERT_THAT(b[i], Pointee(i)); 304 } 305 } 306 } 307 308 // Swapping containers of unique pointers should work fine, with no 309 // leaks, despite the fact that unique pointers are trivially relocatable but 310 // not trivially destructible. 311 // TODO(absl-team): Using unique_ptr here is technically correct, but 312 // a trivially relocatable struct would be less semantically confusing. 
313 TEST(UniquePtr, Swap) { 314 for (size_t size1 = 0; size1 < 5; ++size1) { 315 for (size_t size2 = 0; size2 < 5; ++size2) { 316 absl::InlinedVector<std::unique_ptr<size_t>, 2> a; 317 absl::InlinedVector<std::unique_ptr<size_t>, 2> b; 318 for (size_t i = 0; i < size1; ++i) { 319 a.push_back(std::make_unique<size_t>(i + 10)); 320 } 321 for (size_t i = 0; i < size2; ++i) { 322 b.push_back(std::make_unique<size_t>(i + 20)); 323 } 324 a.swap(b); 325 ASSERT_THAT(a, SizeIs(size2)); 326 ASSERT_THAT(b, SizeIs(size1)); 327 for (size_t i = 0; i < a.size(); ++i) { 328 ASSERT_THAT(a[i], Pointee(i + 20)); 329 } 330 for (size_t i = 0; i < b.size(); ++i) { 331 ASSERT_THAT(b[i], Pointee(i + 10)); 332 } 333 } 334 } 335 } 336 337 // Erasing from a container of unique pointers should work fine, with no 338 // leaks, despite the fact that unique pointers are trivially relocatable but 339 // not trivially destructible. 340 // TODO(absl-team): Using unique_ptr here is technically correct, but 341 // a trivially relocatable struct would be less semantically confusing. 342 TEST(UniquePtr, EraseSingle) { 343 for (size_t size = 4; size < 16; ++size) { 344 absl::InlinedVector<std::unique_ptr<size_t>, 8> a; 345 for (size_t i = 0; i < size; ++i) { 346 a.push_back(std::make_unique<size_t>(i)); 347 } 348 a.erase(a.begin()); 349 ASSERT_THAT(a, SizeIs(size - 1)); 350 for (size_t i = 0; i < size - 1; ++i) { 351 ASSERT_THAT(a[i], Pointee(i + 1)); 352 } 353 a.erase(a.begin() + 2); 354 ASSERT_THAT(a, SizeIs(size - 2)); 355 ASSERT_THAT(a[0], Pointee(1)); 356 ASSERT_THAT(a[1], Pointee(2)); 357 for (size_t i = 2; i < size - 2; ++i) { 358 ASSERT_THAT(a[i], Pointee(i + 2)); 359 } 360 } 361 } 362 363 // Erasing from a container of unique pointers should work fine, with no 364 // leaks, despite the fact that unique pointers are trivially relocatable but 365 // not trivially destructible. 
366 // TODO(absl-team): Using unique_ptr here is technically correct, but 367 // a trivially relocatable struct would be less semantically confusing. 368 TEST(UniquePtr, EraseMulti) { 369 for (size_t size = 5; size < 16; ++size) { 370 absl::InlinedVector<std::unique_ptr<size_t>, 8> a; 371 for (size_t i = 0; i < size; ++i) { 372 a.push_back(std::make_unique<size_t>(i)); 373 } 374 a.erase(a.begin(), a.begin() + 2); 375 ASSERT_THAT(a, SizeIs(size - 2)); 376 for (size_t i = 0; i < size - 2; ++i) { 377 ASSERT_THAT(a[i], Pointee(i + 2)); 378 } 379 a.erase(a.begin() + 1, a.begin() + 3); 380 ASSERT_THAT(a, SizeIs(size - 4)); 381 ASSERT_THAT(a[0], Pointee(2)); 382 for (size_t i = 1; i < size - 4; ++i) { 383 ASSERT_THAT(a[i], Pointee(i + 4)); 384 } 385 } 386 } 387 388 // At the end of this test loop, the elements between [erase_begin, erase_end) 389 // should have reference counts == 0, and all others elements should have 390 // reference counts == 1. 391 TEST(RefCountedVec, EraseBeginEnd) { 392 for (size_t len = 1; len < 20; ++len) { 393 for (size_t erase_begin = 0; erase_begin < len; ++erase_begin) { 394 for (size_t erase_end = erase_begin; erase_end <= len; ++erase_end) { 395 std::vector<int> counts(len, 0); 396 RefCountedVec v; 397 for (size_t i = 0; i < len; ++i) { 398 v.push_back(RefCounted(static_cast<int>(i), &counts[i])); 399 } 400 401 size_t erase_len = erase_end - erase_begin; 402 403 v.erase(v.begin() + erase_begin, v.begin() + erase_end); 404 405 EXPECT_EQ(len - erase_len, v.size()); 406 407 // Check the elements before the first element erased. 408 for (size_t i = 0; i < erase_begin; ++i) { 409 EXPECT_EQ(static_cast<int>(i), v[i].value_); 410 } 411 412 // Check the elements after the first element erased. 413 for (size_t i = erase_begin; i < v.size(); ++i) { 414 EXPECT_EQ(static_cast<int>(i + erase_len), v[i].value_); 415 } 416 417 // Check that the elements at the beginning are preserved. 
418 for (size_t i = 0; i < erase_begin; ++i) { 419 EXPECT_EQ(1, counts[i]); 420 } 421 422 // Check that the erased elements are destroyed 423 for (size_t i = erase_begin; i < erase_end; ++i) { 424 EXPECT_EQ(0, counts[i]); 425 } 426 427 // Check that the elements at the end are preserved. 428 for (size_t i = erase_end; i < len; ++i) { 429 EXPECT_EQ(1, counts[i]); 430 } 431 } 432 } 433 } 434 } 435 436 struct NoDefaultCtor { 437 explicit NoDefaultCtor(int) {} 438 }; 439 struct NoCopy { 440 NoCopy() {} 441 NoCopy(const NoCopy&) = delete; 442 }; 443 struct NoAssign { 444 NoAssign() {} 445 NoAssign& operator=(const NoAssign&) = delete; 446 }; 447 struct MoveOnly { 448 MoveOnly() {} 449 MoveOnly(MoveOnly&&) = default; 450 MoveOnly& operator=(MoveOnly&&) = default; 451 }; 452 TEST(InlinedVectorTest, NoDefaultCtor) { 453 absl::InlinedVector<NoDefaultCtor, 1> v(10, NoDefaultCtor(2)); 454 (void)v; 455 } 456 TEST(InlinedVectorTest, NoCopy) { 457 absl::InlinedVector<NoCopy, 1> v(10); 458 (void)v; 459 } 460 TEST(InlinedVectorTest, NoAssign) { 461 absl::InlinedVector<NoAssign, 1> v(10); 462 (void)v; 463 } 464 TEST(InlinedVectorTest, MoveOnly) { 465 absl::InlinedVector<MoveOnly, 2> v; 466 v.push_back(MoveOnly{}); 467 v.push_back(MoveOnly{}); 468 v.push_back(MoveOnly{}); 469 v.erase(v.begin()); 470 v.push_back(MoveOnly{}); 471 v.erase(v.begin(), v.begin() + 1); 472 v.insert(v.begin(), MoveOnly{}); 473 v.emplace(v.begin()); 474 v.emplace(v.begin(), MoveOnly{}); 475 } 476 TEST(InlinedVectorTest, Noexcept) { 477 EXPECT_TRUE(std::is_nothrow_move_constructible<IntVec>::value); 478 EXPECT_TRUE((std::is_nothrow_move_constructible< 479 absl::InlinedVector<MoveOnly, 2>>::value)); 480 481 struct MoveCanThrow { 482 MoveCanThrow(MoveCanThrow&&) {} 483 }; 484 EXPECT_EQ(absl::default_allocator_is_nothrow::value, 485 (std::is_nothrow_move_constructible< 486 absl::InlinedVector<MoveCanThrow, 2>>::value)); 487 } 488 489 TEST(InlinedVectorTest, EmplaceBack) { 490 
absl::InlinedVector<std::pair<std::string, int>, 1> v; 491 492 auto& inlined_element = v.emplace_back("answer", 42); 493 EXPECT_EQ(&inlined_element, &v[0]); 494 EXPECT_EQ(inlined_element.first, "answer"); 495 EXPECT_EQ(inlined_element.second, 42); 496 497 auto& allocated_element = v.emplace_back("taxicab", 1729); 498 EXPECT_EQ(&allocated_element, &v[1]); 499 EXPECT_EQ(allocated_element.first, "taxicab"); 500 EXPECT_EQ(allocated_element.second, 1729); 501 } 502 503 TEST(InlinedVectorTest, ShrinkToFitGrowingVector) { 504 absl::InlinedVector<std::pair<std::string, int>, 1> v; 505 506 v.shrink_to_fit(); 507 EXPECT_EQ(v.capacity(), 1u); 508 509 v.emplace_back("answer", 42); 510 v.shrink_to_fit(); 511 EXPECT_EQ(v.capacity(), 1u); 512 513 v.emplace_back("taxicab", 1729); 514 EXPECT_GE(v.capacity(), 2u); 515 v.shrink_to_fit(); 516 EXPECT_EQ(v.capacity(), 2u); 517 518 v.reserve(100); 519 EXPECT_GE(v.capacity(), 100u); 520 v.shrink_to_fit(); 521 EXPECT_EQ(v.capacity(), 2u); 522 } 523 524 TEST(InlinedVectorTest, ShrinkToFitEdgeCases) { 525 { 526 absl::InlinedVector<std::pair<std::string, int>, 1> v; 527 v.emplace_back("answer", 42); 528 v.emplace_back("taxicab", 1729); 529 EXPECT_GE(v.capacity(), 2u); 530 v.pop_back(); 531 v.shrink_to_fit(); 532 EXPECT_EQ(v.capacity(), 1u); 533 EXPECT_EQ(v[0].first, "answer"); 534 EXPECT_EQ(v[0].second, 42); 535 } 536 537 { 538 absl::InlinedVector<std::string, 2> v(100); 539 v.resize(0); 540 v.shrink_to_fit(); 541 EXPECT_EQ(v.capacity(), 2u); // inlined capacity 542 } 543 544 { 545 absl::InlinedVector<std::string, 2> v(100); 546 v.resize(1); 547 v.shrink_to_fit(); 548 EXPECT_EQ(v.capacity(), 2u); // inlined capacity 549 } 550 551 { 552 absl::InlinedVector<std::string, 2> v(100); 553 v.resize(2); 554 v.shrink_to_fit(); 555 EXPECT_EQ(v.capacity(), 2u); 556 } 557 558 { 559 absl::InlinedVector<std::string, 2> v(100); 560 v.resize(3); 561 v.shrink_to_fit(); 562 EXPECT_EQ(v.capacity(), 3u); 563 } 564 } 565 566 TEST(IntVec, Insert) { 567 for (size_t 
len = 0; len < 20; len++) { 568 for (ptrdiff_t pos = 0; pos <= static_cast<ptrdiff_t>(len); pos++) { 569 { 570 // Single element 571 std::vector<int> std_v; 572 Fill(&std_v, len); 573 IntVec v; 574 Fill(&v, len); 575 576 std_v.insert(std_v.begin() + pos, 9999); 577 IntVec::iterator it = v.insert(v.cbegin() + pos, 9999); 578 EXPECT_THAT(v, ElementsAreArray(std_v)); 579 EXPECT_EQ(it, v.cbegin() + pos); 580 } 581 { 582 // n elements 583 std::vector<int> std_v; 584 Fill(&std_v, len); 585 IntVec v; 586 Fill(&v, len); 587 588 IntVec::size_type n = 5; 589 std_v.insert(std_v.begin() + pos, n, 9999); 590 IntVec::iterator it = v.insert(v.cbegin() + pos, n, 9999); 591 EXPECT_THAT(v, ElementsAreArray(std_v)); 592 EXPECT_EQ(it, v.cbegin() + pos); 593 } 594 { 595 // Iterator range (random access iterator) 596 std::vector<int> std_v; 597 Fill(&std_v, len); 598 IntVec v; 599 Fill(&v, len); 600 601 const std::vector<int> input = {9999, 8888, 7777}; 602 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend()); 603 IntVec::iterator it = 604 v.insert(v.cbegin() + pos, input.cbegin(), input.cend()); 605 EXPECT_THAT(v, ElementsAreArray(std_v)); 606 EXPECT_EQ(it, v.cbegin() + pos); 607 } 608 { 609 // Iterator range (forward iterator) 610 std::vector<int> std_v; 611 Fill(&std_v, len); 612 IntVec v; 613 Fill(&v, len); 614 615 const std::forward_list<int> input = {9999, 8888, 7777}; 616 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend()); 617 IntVec::iterator it = 618 v.insert(v.cbegin() + pos, input.cbegin(), input.cend()); 619 EXPECT_THAT(v, ElementsAreArray(std_v)); 620 EXPECT_EQ(it, v.cbegin() + pos); 621 } 622 { 623 // Iterator range (input iterator) 624 std::vector<int> std_v; 625 Fill(&std_v, len); 626 IntVec v; 627 Fill(&v, len); 628 629 std_v.insert(std_v.begin() + pos, {9999, 8888, 7777}); 630 std::istringstream input("9999 8888 7777"); 631 IntVec::iterator it = 632 v.insert(v.cbegin() + pos, std::istream_iterator<int>(input), 633 std::istream_iterator<int>()); 
634 EXPECT_THAT(v, ElementsAreArray(std_v)); 635 EXPECT_EQ(it, v.cbegin() + pos); 636 } 637 { 638 // Initializer list 639 std::vector<int> std_v; 640 Fill(&std_v, len); 641 IntVec v; 642 Fill(&v, len); 643 644 std_v.insert(std_v.begin() + pos, {9999, 8888}); 645 IntVec::iterator it = v.insert(v.cbegin() + pos, {9999, 8888}); 646 EXPECT_THAT(v, ElementsAreArray(std_v)); 647 EXPECT_EQ(it, v.cbegin() + pos); 648 } 649 } 650 } 651 } 652 653 TEST(IntPairVec, Insert) { 654 for (size_t len = 0; len < 20; len++) { 655 for (ptrdiff_t pos = 0; pos <= static_cast<ptrdiff_t>(len); pos++) { 656 // Iterator range (C++20 forward iterator) 657 const std::forward_list<int> unzipped_input = {9999, 8888, 7777}; 658 const absl::base_internal::Cpp20ForwardZipIterator< 659 std::forward_list<int>::const_iterator> 660 begin(unzipped_input.cbegin(), unzipped_input.cbegin()); 661 const absl::base_internal::Cpp20ForwardZipIterator< 662 std::forward_list<int>::const_iterator> 663 end(unzipped_input.cend(), unzipped_input.cend()); 664 665 std::vector<std::pair<int, int>> std_v; 666 absl::InlinedVector<std::pair<int, int>, 8> v; 667 for (size_t i = 0; i < len; ++i) { 668 std_v.emplace_back(i, i); 669 v.emplace_back(i, i); 670 } 671 672 std_v.insert(std_v.begin() + pos, begin, end); 673 auto it = v.insert(v.cbegin() + pos, begin, end); 674 EXPECT_THAT(v, ElementsAreArray(std_v)); 675 EXPECT_EQ(it, v.cbegin() + pos); 676 } 677 } 678 } 679 680 TEST(RefCountedVec, InsertConstructorDestructor) { 681 // Make sure the proper construction/destruction happen during insert 682 // operations. 
683 for (size_t len = 0; len < 20; len++) { 684 SCOPED_TRACE(len); 685 for (size_t pos = 0; pos <= len; pos++) { 686 SCOPED_TRACE(pos); 687 std::vector<int> counts(len, 0); 688 int inserted_count = 0; 689 RefCountedVec v; 690 for (size_t i = 0; i < len; ++i) { 691 SCOPED_TRACE(i); 692 v.push_back(RefCounted(static_cast<int>(i), &counts[i])); 693 } 694 695 EXPECT_THAT(counts, Each(Eq(1))); 696 697 RefCounted insert_element(9999, &inserted_count); 698 EXPECT_EQ(1, inserted_count); 699 v.insert(v.begin() + pos, insert_element); 700 EXPECT_EQ(2, inserted_count); 701 // Check that the elements at the end are preserved. 702 EXPECT_THAT(counts, Each(Eq(1))); 703 EXPECT_EQ(2, inserted_count); 704 } 705 } 706 } 707 708 TEST(IntVec, Resize) { 709 for (size_t len = 0; len < 20; len++) { 710 IntVec v; 711 Fill(&v, len); 712 713 // Try resizing up and down by k elements 714 static const int kResizeElem = 1000000; 715 for (size_t k = 0; k < 10; k++) { 716 // Enlarging resize 717 v.resize(len + k, kResizeElem); 718 EXPECT_EQ(len + k, v.size()); 719 EXPECT_LE(len + k, v.capacity()); 720 for (size_t i = 0; i < len + k; i++) { 721 if (i < len) { 722 EXPECT_EQ(static_cast<int>(i), v[i]); 723 } else { 724 EXPECT_EQ(kResizeElem, v[i]); 725 } 726 } 727 728 // Shrinking resize 729 v.resize(len, kResizeElem); 730 EXPECT_EQ(len, v.size()); 731 EXPECT_LE(len, v.capacity()); 732 for (size_t i = 0; i < len; i++) { 733 EXPECT_EQ(static_cast<int>(i), v[i]); 734 } 735 } 736 } 737 } 738 739 TEST(IntVec, InitWithLength) { 740 for (size_t len = 0; len < 20; len++) { 741 IntVec v(len, 7); 742 EXPECT_EQ(len, v.size()); 743 EXPECT_LE(len, v.capacity()); 744 for (size_t i = 0; i < len; i++) { 745 EXPECT_EQ(7, v[i]); 746 } 747 } 748 } 749 750 TEST(IntVec, CopyConstructorAndAssignment) { 751 for (size_t len = 0; len < 20; len++) { 752 IntVec v; 753 Fill(&v, len); 754 EXPECT_EQ(len, v.size()); 755 EXPECT_LE(len, v.capacity()); 756 757 IntVec v2(v); 758 EXPECT_TRUE(v == v2) << PrintToString(v) << 
PrintToString(v2); 759 760 for (size_t start_len = 0; start_len < 20; start_len++) { 761 IntVec v3; 762 Fill(&v3, start_len, 99); // Add dummy elements that should go away 763 v3 = v; 764 EXPECT_TRUE(v == v3) << PrintToString(v) << PrintToString(v3); 765 } 766 } 767 } 768 769 TEST(IntVec, AliasingCopyAssignment) { 770 for (size_t len = 0; len < 20; ++len) { 771 IntVec original; 772 Fill(&original, len); 773 IntVec dup = original; 774 dup = *&dup; 775 EXPECT_EQ(dup, original); 776 } 777 } 778 779 TEST(IntVec, MoveConstructorAndAssignment) { 780 for (size_t len = 0; len < 20; len++) { 781 IntVec v_in; 782 const size_t inlined_capacity = v_in.capacity(); 783 Fill(&v_in, len); 784 EXPECT_EQ(len, v_in.size()); 785 EXPECT_LE(len, v_in.capacity()); 786 787 { 788 IntVec v_temp(v_in); 789 auto* old_data = v_temp.data(); 790 IntVec v_out(std::move(v_temp)); 791 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out); 792 if (v_in.size() > inlined_capacity) { 793 // Allocation is moved as a whole, data stays in place. 794 EXPECT_TRUE(v_out.data() == old_data); 795 } else { 796 EXPECT_FALSE(v_out.data() == old_data); 797 } 798 } 799 for (size_t start_len = 0; start_len < 20; start_len++) { 800 IntVec v_out; 801 Fill(&v_out, start_len, 99); // Add dummy elements that should go away 802 IntVec v_temp(v_in); 803 auto* old_data = v_temp.data(); 804 v_out = std::move(v_temp); 805 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out); 806 if (v_in.size() > inlined_capacity) { 807 // Allocation is moved as a whole, data stays in place. 
808 EXPECT_TRUE(v_out.data() == old_data); 809 } else { 810 EXPECT_FALSE(v_out.data() == old_data); 811 } 812 } 813 } 814 } 815 816 class NotTriviallyDestructible { 817 public: 818 NotTriviallyDestructible() : p_(new int(1)) {} 819 explicit NotTriviallyDestructible(int i) : p_(new int(i)) {} 820 821 NotTriviallyDestructible(const NotTriviallyDestructible& other) 822 : p_(new int(*other.p_)) {} 823 824 NotTriviallyDestructible& operator=(const NotTriviallyDestructible& other) { 825 p_ = absl::make_unique<int>(*other.p_); 826 return *this; 827 } 828 829 bool operator==(const NotTriviallyDestructible& other) const { 830 return *p_ == *other.p_; 831 } 832 833 private: 834 std::unique_ptr<int> p_; 835 }; 836 837 TEST(AliasingTest, Emplace) { 838 for (size_t i = 2; i < 20; ++i) { 839 absl::InlinedVector<NotTriviallyDestructible, 10> vec; 840 for (size_t j = 0; j < i; ++j) { 841 vec.push_back(NotTriviallyDestructible(static_cast<int>(j))); 842 } 843 vec.emplace(vec.begin(), vec[0]); 844 EXPECT_EQ(vec[0], vec[1]); 845 vec.emplace(vec.begin() + i / 2, vec[i / 2]); 846 EXPECT_EQ(vec[i / 2], vec[i / 2 + 1]); 847 vec.emplace(vec.end() - 1, vec.back()); 848 EXPECT_EQ(vec[vec.size() - 2], vec.back()); 849 } 850 } 851 852 TEST(AliasingTest, InsertWithCount) { 853 for (size_t i = 1; i < 20; ++i) { 854 absl::InlinedVector<NotTriviallyDestructible, 10> vec; 855 for (size_t j = 0; j < i; ++j) { 856 vec.push_back(NotTriviallyDestructible(static_cast<int>(j))); 857 } 858 for (size_t n = 0; n < 5; ++n) { 859 // We use back where we can because it's guaranteed to become invalidated 860 vec.insert(vec.begin(), n, vec.back()); 861 auto b = vec.begin(); 862 EXPECT_TRUE( 863 std::all_of(b, b + n, [&vec](const NotTriviallyDestructible& x) { 864 return x == vec.back(); 865 })); 866 867 auto m_idx = vec.size() / 2; 868 vec.insert(vec.begin() + m_idx, n, vec.back()); 869 auto m = vec.begin() + m_idx; 870 EXPECT_TRUE( 871 std::all_of(m, m + n, [&vec](const NotTriviallyDestructible& x) { 872 
return x == vec.back(); 873 })); 874 875 // We want distinct values so the equality test is meaningful, 876 // vec[vec.size() - 1] is also almost always invalidated. 877 auto old_e = vec.size() - 1; 878 auto val = vec[old_e]; 879 vec.insert(vec.end(), n, vec[old_e]); 880 auto e = vec.begin() + old_e; 881 EXPECT_TRUE(std::all_of( 882 e, e + n, 883 [&val](const NotTriviallyDestructible& x) { return x == val; })); 884 } 885 } 886 } 887 888 TEST(OverheadTest, Storage) { 889 // Check for size overhead. 890 // In particular, ensure that std::allocator doesn't cost anything to store. 891 // The union should be absorbing some of the allocation bookkeeping overhead 892 // in the larger vectors, leaving only the size_ field as overhead. 893 894 struct T { 895 void* val; 896 }; 897 size_t expected_overhead = sizeof(T); 898 899 EXPECT_EQ((2 * expected_overhead), 900 sizeof(absl::InlinedVector<T, 1>) - sizeof(T[1])); 901 EXPECT_EQ(expected_overhead, 902 sizeof(absl::InlinedVector<T, 2>) - sizeof(T[2])); 903 EXPECT_EQ(expected_overhead, 904 sizeof(absl::InlinedVector<T, 3>) - sizeof(T[3])); 905 EXPECT_EQ(expected_overhead, 906 sizeof(absl::InlinedVector<T, 4>) - sizeof(T[4])); 907 EXPECT_EQ(expected_overhead, 908 sizeof(absl::InlinedVector<T, 5>) - sizeof(T[5])); 909 EXPECT_EQ(expected_overhead, 910 sizeof(absl::InlinedVector<T, 6>) - sizeof(T[6])); 911 EXPECT_EQ(expected_overhead, 912 sizeof(absl::InlinedVector<T, 7>) - sizeof(T[7])); 913 EXPECT_EQ(expected_overhead, 914 sizeof(absl::InlinedVector<T, 8>) - sizeof(T[8])); 915 } 916 917 TEST(IntVec, Clear) { 918 for (size_t len = 0; len < 20; len++) { 919 SCOPED_TRACE(len); 920 IntVec v; 921 Fill(&v, len); 922 v.clear(); 923 EXPECT_EQ(0u, v.size()); 924 EXPECT_EQ(v.begin(), v.end()); 925 } 926 } 927 928 TEST(IntVec, Reserve) { 929 for (size_t len = 0; len < 20; len++) { 930 IntVec v; 931 Fill(&v, len); 932 933 for (size_t newlen = 0; newlen < 100; newlen++) { 934 const int* start_rep = v.data(); 935 v.reserve(newlen); 936 const 
int* final_rep = v.data(); 937 if (newlen <= len) { 938 EXPECT_EQ(start_rep, final_rep); 939 } 940 EXPECT_LE(newlen, v.capacity()); 941 942 // Filling up to newlen should not change rep 943 while (v.size() < newlen) { 944 v.push_back(0); 945 } 946 EXPECT_EQ(final_rep, v.data()); 947 } 948 } 949 } 950 951 TEST(StringVec, SelfRefPushBack) { 952 std::vector<std::string> std_v; 953 absl::InlinedVector<std::string, 4> v; 954 const std::string s = "A quite long string to ensure heap."; 955 std_v.push_back(s); 956 v.push_back(s); 957 for (int i = 0; i < 20; ++i) { 958 EXPECT_THAT(v, ElementsAreArray(std_v)); 959 960 v.push_back(v.back()); 961 std_v.push_back(std_v.back()); 962 } 963 EXPECT_THAT(v, ElementsAreArray(std_v)); 964 } 965 966 TEST(StringVec, SelfRefPushBackWithMove) { 967 std::vector<std::string> std_v; 968 absl::InlinedVector<std::string, 4> v; 969 const std::string s = "A quite long string to ensure heap."; 970 std_v.push_back(s); 971 v.push_back(s); 972 for (int i = 0; i < 20; ++i) { 973 EXPECT_EQ(v.back(), std_v.back()); 974 975 v.push_back(std::move(v.back())); 976 std_v.push_back(std::move(std_v.back())); 977 } 978 EXPECT_EQ(v.back(), std_v.back()); 979 } 980 981 TEST(StringVec, SelfMove) { 982 const std::string s = "A quite long string to ensure heap."; 983 for (int len = 0; len < 20; len++) { 984 SCOPED_TRACE(len); 985 absl::InlinedVector<std::string, 8> v; 986 for (int i = 0; i < len; ++i) { 987 SCOPED_TRACE(i); 988 v.push_back(s); 989 } 990 // Indirection necessary to avoid compiler warning. 991 v = std::move(*(&v)); 992 // Ensure that the inlined vector is still in a valid state by copying it. 993 // We don't expect specific contents since a self-move results in an 994 // unspecified valid state. 
995 std::vector<std::string> copy(v.begin(), v.end()); 996 } 997 } 998 999 TEST(IntVec, Swap) { 1000 for (size_t l1 = 0; l1 < 20; l1++) { 1001 SCOPED_TRACE(l1); 1002 for (size_t l2 = 0; l2 < 20; l2++) { 1003 SCOPED_TRACE(l2); 1004 IntVec a = Fill(l1, 0); 1005 IntVec b = Fill(l2, 100); 1006 { 1007 using std::swap; 1008 swap(a, b); 1009 } 1010 EXPECT_EQ(l1, b.size()); 1011 EXPECT_EQ(l2, a.size()); 1012 for (size_t i = 0; i < l1; i++) { 1013 SCOPED_TRACE(i); 1014 EXPECT_EQ(static_cast<int>(i), b[i]); 1015 } 1016 for (size_t i = 0; i < l2; i++) { 1017 SCOPED_TRACE(i); 1018 EXPECT_EQ(100 + static_cast<int>(i), a[i]); 1019 } 1020 } 1021 } 1022 } 1023 1024 TYPED_TEST_P(InstanceTest, Swap) { 1025 using Instance = TypeParam; 1026 using InstanceVec = absl::InlinedVector<Instance, 8>; 1027 for (size_t l1 = 0; l1 < 20; l1++) { 1028 SCOPED_TRACE(l1); 1029 for (size_t l2 = 0; l2 < 20; l2++) { 1030 SCOPED_TRACE(l2); 1031 InstanceTracker tracker; 1032 InstanceVec a, b; 1033 const size_t inlined_capacity = a.capacity(); 1034 auto min_len = std::min(l1, l2); 1035 auto max_len = std::max(l1, l2); 1036 for (size_t i = 0; i < l1; i++) 1037 a.push_back(Instance(static_cast<int>(i))); 1038 for (size_t i = 0; i < l2; i++) 1039 b.push_back(Instance(100 + static_cast<int>(i))); 1040 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2)); 1041 tracker.ResetCopiesMovesSwaps(); 1042 { 1043 using std::swap; 1044 swap(a, b); 1045 } 1046 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2)); 1047 if (a.size() > inlined_capacity && b.size() > inlined_capacity) { 1048 EXPECT_EQ(tracker.swaps(), 0); // Allocations are swapped. 1049 EXPECT_EQ(tracker.moves(), 0); 1050 } else if (a.size() <= inlined_capacity && b.size() <= inlined_capacity) { 1051 EXPECT_EQ(tracker.swaps(), static_cast<int>(min_len)); 1052 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()), 1053 static_cast<int>(max_len - min_len)); 1054 } else { 1055 // One is allocated and the other isn't. 
The allocation is transferred 1056 // without copying elements, and the inlined instances are copied/moved. 1057 EXPECT_EQ(tracker.swaps(), 0); 1058 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()), 1059 static_cast<int>(min_len)); 1060 } 1061 1062 EXPECT_EQ(l1, b.size()); 1063 EXPECT_EQ(l2, a.size()); 1064 for (size_t i = 0; i < l1; i++) { 1065 EXPECT_EQ(static_cast<int>(i), b[i].value()); 1066 } 1067 for (size_t i = 0; i < l2; i++) { 1068 EXPECT_EQ(100 + static_cast<int>(i), a[i].value()); 1069 } 1070 } 1071 } 1072 } 1073 1074 TEST(IntVec, EqualAndNotEqual) { 1075 IntVec a, b; 1076 EXPECT_TRUE(a == b); 1077 EXPECT_FALSE(a != b); 1078 1079 a.push_back(3); 1080 EXPECT_FALSE(a == b); 1081 EXPECT_TRUE(a != b); 1082 1083 b.push_back(3); 1084 EXPECT_TRUE(a == b); 1085 EXPECT_FALSE(a != b); 1086 1087 b.push_back(7); 1088 EXPECT_FALSE(a == b); 1089 EXPECT_TRUE(a != b); 1090 1091 a.push_back(6); 1092 EXPECT_FALSE(a == b); 1093 EXPECT_TRUE(a != b); 1094 1095 a.clear(); 1096 b.clear(); 1097 for (size_t i = 0; i < 100; i++) { 1098 a.push_back(static_cast<int>(i)); 1099 b.push_back(static_cast<int>(i)); 1100 EXPECT_TRUE(a == b); 1101 EXPECT_FALSE(a != b); 1102 1103 b[i] = b[i] + 1; 1104 EXPECT_FALSE(a == b); 1105 EXPECT_TRUE(a != b); 1106 1107 b[i] = b[i] - 1; // Back to before 1108 EXPECT_TRUE(a == b); 1109 EXPECT_FALSE(a != b); 1110 } 1111 } 1112 1113 TEST(IntVec, RelationalOps) { 1114 IntVec a, b; 1115 EXPECT_FALSE(a < b); 1116 EXPECT_FALSE(b < a); 1117 EXPECT_FALSE(a > b); 1118 EXPECT_FALSE(b > a); 1119 EXPECT_TRUE(a <= b); 1120 EXPECT_TRUE(b <= a); 1121 EXPECT_TRUE(a >= b); 1122 EXPECT_TRUE(b >= a); 1123 b.push_back(3); 1124 EXPECT_TRUE(a < b); 1125 EXPECT_FALSE(b < a); 1126 EXPECT_FALSE(a > b); 1127 EXPECT_TRUE(b > a); 1128 EXPECT_TRUE(a <= b); 1129 EXPECT_FALSE(b <= a); 1130 EXPECT_FALSE(a >= b); 1131 EXPECT_TRUE(b >= a); 1132 } 1133 1134 TYPED_TEST_P(InstanceTest, CountConstructorsDestructors) { 1135 using Instance = TypeParam; 1136 using InstanceVec = 
absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (size_t len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    tracker.ResetCopiesMovesSwaps();

    InstanceVec v;
    const size_t inlined_capacity = v.capacity();
    for (size_t i = 0; i < len; i++) {
      v.push_back(Instance(static_cast<int>(i)));
    }
    EXPECT_EQ(tracker.instances(), static_cast<int>(len));
    EXPECT_GE(tracker.copies() + tracker.moves(),
              static_cast<int>(len));  // More due to reallocation.
    tracker.ResetCopiesMovesSwaps();

    // Enlarging resize() must construct some objects
    tracker.ResetCopiesMovesSwaps();
    v.resize(len + 10, Instance(100));
    EXPECT_EQ(tracker.instances(), static_cast<int>(len) + 10);
    if (len <= inlined_capacity && len + 10 > inlined_capacity) {
      // Crossing from inline to allocated storage moves/copies every existing
      // element exactly once, plus the 10 new fill-constructed ones.
      EXPECT_EQ(tracker.copies() + tracker.moves(), 10 + static_cast<int>(len));
    } else {
      // Only specify a minimum number of copies + moves. We don't want to
      // depend on the reallocation policy here.
      EXPECT_GE(tracker.copies() + tracker.moves(),
                10);  // More due to reallocation.
    }

    // Shrinking resize() must destroy some objects
    tracker.ResetCopiesMovesSwaps();
    v.resize(len, Instance(100));
    EXPECT_EQ(tracker.instances(), static_cast<int>(len));
    EXPECT_EQ(tracker.copies(), 0);
    EXPECT_EQ(tracker.moves(), 0);

    // reserve() must not increase the number of initialized objects
    SCOPED_TRACE("reserve");
    v.reserve(len + 1000);
    EXPECT_EQ(tracker.instances(), static_cast<int>(len));
    EXPECT_EQ(tracker.copies() + tracker.moves(), static_cast<int>(len));

    // pop_back() and erase() must destroy one object
    if (len > 0) {
      tracker.ResetCopiesMovesSwaps();
      v.pop_back();
      EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 1);
      EXPECT_EQ(tracker.copies(), 0);
      EXPECT_EQ(tracker.moves(), 0);

      if (!v.empty()) {
        tracker.ResetCopiesMovesSwaps();
        v.erase(v.begin());
        // Erasing the first element shifts each remaining element down once.
        EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 2);
        EXPECT_EQ(tracker.copies() + tracker.moves(),
                  static_cast<int>(len) - 2);
      }
    }

    // An empty range erase must be a no-op.
    tracker.ResetCopiesMovesSwaps();
    int instances_before_empty_erase = tracker.instances();
    v.erase(v.begin(), v.begin());
    EXPECT_EQ(tracker.instances(), instances_before_empty_erase);
    EXPECT_EQ(tracker.copies() + tracker.moves(), 0);
  }
}

// The copy constructor must copy-construct exactly one instance per source
// element and destroy them all when the copy goes out of scope.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnCopyConstruction) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    tracker.ResetCopiesMovesSwaps();

    InstanceVec v;
    for (int i = 0; i < len; i++) {
      v.push_back(Instance(i));
    }
    EXPECT_EQ(tracker.instances(), len);
    EXPECT_GE(tracker.copies() + tracker.moves(),
              len);  // More due to reallocation.
    tracker.ResetCopiesMovesSwaps();
    {  // Copy constructor should create 'len' more instances.
      InstanceVec v_copy(v);
      EXPECT_EQ(v_copy.size(), v.size());
      EXPECT_EQ(tracker.instances(), len + len);
      EXPECT_EQ(tracker.copies(), len);
      EXPECT_EQ(tracker.moves(), 0);
    }
    EXPECT_EQ(tracker.instances(), len);
  }
}

// Move construction must steal the heap allocation wholesale when the source
// is allocated, and move (or copy) element-wise when the source is inline.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveConstruction) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    tracker.ResetCopiesMovesSwaps();

    InstanceVec v;
    const size_t inlined_capacity = v.capacity();
    for (int i = 0; i < len; i++) {
      v.push_back(Instance(i));
    }
    EXPECT_EQ(tracker.instances(), len);
    EXPECT_GE(tracker.copies() + tracker.moves(),
              len);  // More due to reallocation.
    tracker.ResetCopiesMovesSwaps();
    {
      InstanceVec v_copy(std::move(v));
      EXPECT_EQ(v_copy.size(), len);
      if (static_cast<size_t>(len) > inlined_capacity) {
        // Allocation is moved as a whole.
        EXPECT_EQ(tracker.instances(), len);
        EXPECT_EQ(tracker.live_instances(), len);
        // Tests an implementation detail, don't rely on this in your code.
        EXPECT_EQ(v.size(), 0u);  // NOLINT misc-use-after-move
        EXPECT_EQ(tracker.copies(), 0);
        EXPECT_EQ(tracker.moves(), 0);
      } else {
        EXPECT_EQ(tracker.instances(), len + len);
        if (Instance::supports_move()) {
          EXPECT_EQ(tracker.live_instances(), len);
          EXPECT_EQ(tracker.copies(), 0);
          EXPECT_EQ(tracker.moves(), len);
        } else {
          EXPECT_EQ(tracker.live_instances(), len + len);
          EXPECT_EQ(tracker.copies(), len);
          EXPECT_EQ(tracker.moves(), 0);
        }
      }
      EXPECT_EQ(tracker.swaps(), 0);
    }
  }
}

// Copy assignment in both directions (grow and shrink) must leave exactly the
// source's element count alive on the destination.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnAssignment) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    for (int longorshort = 0; longorshort <= 1; ++longorshort) {
      SCOPED_TRACE(longorshort);
      tracker.ResetCopiesMovesSwaps();

      InstanceVec longer, shorter;
      for (int i = 0; i < len; i++) {
        longer.push_back(Instance(i));
        shorter.push_back(Instance(i));
      }
      longer.push_back(Instance(len));
      EXPECT_EQ(tracker.instances(), len + len + 1);
      EXPECT_GE(tracker.copies() + tracker.moves(),
                len + len + 1);  // More due to reallocation.

      tracker.ResetCopiesMovesSwaps();
      if (longorshort) {
        shorter = longer;
        EXPECT_EQ(tracker.instances(), (len + 1) + (len + 1));
        EXPECT_GE(tracker.copies() + tracker.moves(),
                  len + 1);  // More due to reallocation.
      } else {
        longer = shorter;
        EXPECT_EQ(tracker.instances(), len + len);
        EXPECT_EQ(tracker.copies() + tracker.moves(), len);
      }
    }
  }
}

// Move assignment must transfer the heap allocation without per-element work
// when the source is allocated, otherwise move/copy element-wise.
TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveAssignment) {
  using Instance = TypeParam;
  using InstanceVec = absl::InlinedVector<Instance, 8>;
  InstanceTracker tracker;
  for (int len = 0; len < 20; len++) {
    SCOPED_TRACE(len);
    for (int longorshort = 0; longorshort <= 1; ++longorshort) {
      SCOPED_TRACE(longorshort);
      tracker.ResetCopiesMovesSwaps();

      InstanceVec longer, shorter;
      const size_t inlined_capacity = longer.capacity();
      for (int i = 0; i < len; i++) {
        longer.push_back(Instance(i));
        shorter.push_back(Instance(i));
      }
      longer.push_back(Instance(len));
      EXPECT_EQ(tracker.instances(), len + len + 1);
      EXPECT_GE(tracker.copies() + tracker.moves(),
                len + len + 1);  // More due to reallocation.

      tracker.ResetCopiesMovesSwaps();
      int src_len;
      if (longorshort) {
        src_len = len + 1;
        shorter = std::move(longer);
      } else {
        src_len = len;
        longer = std::move(shorter);
      }
      if (static_cast<size_t>(src_len) > inlined_capacity) {
        // Allocation moved as a whole.
        EXPECT_EQ(tracker.instances(), src_len);
        EXPECT_EQ(tracker.live_instances(), src_len);
        EXPECT_EQ(tracker.copies(), 0);
        EXPECT_EQ(tracker.moves(), 0);
      } else {
        // Elements are all copied.
        EXPECT_EQ(tracker.instances(), src_len + src_len);
        if (Instance::supports_move()) {
          EXPECT_EQ(tracker.copies(), 0);
          EXPECT_EQ(tracker.moves(), src_len);
          EXPECT_EQ(tracker.live_instances(), src_len);
        } else {
          EXPECT_EQ(tracker.copies(), src_len);
          EXPECT_EQ(tracker.moves(), 0);
          EXPECT_EQ(tracker.live_instances(), src_len + src_len);
        }
      }
      EXPECT_EQ(tracker.swaps(), 0);
    }
  }
}

// assign(n, value) with n within the inline capacity must not force the
// vector off its inline backing.
TEST(CountElemAssign, SimpleTypeWithInlineBacking) {
  const size_t inlined_capacity = absl::InlinedVector<int, 2>().capacity();

  for (size_t original_size = 0; original_size <= 5; ++original_size) {
    SCOPED_TRACE(original_size);
    // Original contents are [12345, 12345, ...]
    std::vector<int> original_contents(original_size, 12345);

    absl::InlinedVector<int, 2> v(original_contents.begin(),
                                  original_contents.end());
    v.assign(2, 123);
    EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(123, 123)));
    if (original_size <= inlined_capacity) {
      // If the original had inline backing, it should stay inline.
      EXPECT_EQ(v.capacity(), inlined_capacity);
    }
  }
}

// assign(n, value) with n above the inline capacity must allocate and still
// produce exactly n copies of the value.
TEST(CountElemAssign, SimpleTypeWithAllocation) {
  for (size_t original_size = 0; original_size <= 5; ++original_size) {
    SCOPED_TRACE(original_size);
    // Original contents are [12345, 12345, ...]
    std::vector<int> original_contents(original_size, 12345);

    absl::InlinedVector<int, 2> v(original_contents.begin(),
                                  original_contents.end());
    v.assign(3, 123);
    EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(123, 123, 123)));
    EXPECT_LE(v.size(), v.capacity());
  }
}

// Same inline-backing check as above, but with instrumented element types.
TYPED_TEST_P(InstanceTest, CountElemAssignInlineBacking) {
  using Instance = TypeParam;
  for (size_t original_size = 0; original_size <= 5; ++original_size) {
    SCOPED_TRACE(original_size);
    // Original contents are [12345, 12345, ...]
std::vector<Instance> original_contents(original_size, Instance(12345));

    absl::InlinedVector<Instance, 2> v(original_contents.begin(),
                                       original_contents.end());
    v.assign(2, Instance(123));
    EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(ValueIs(123), ValueIs(123))));
    if (original_size <= 2) {
      // If the original had inline backing, it should stay inline.
      EXPECT_EQ(2u, v.capacity());
    }
  }
}

// Shared body for the two WithAllocation tests below: assign(3, value) on a
// capacity-2 vector must spill to allocated storage for every original size.
template <typename Instance>
void InstanceCountElemAssignWithAllocationTest() {
  for (size_t original_size = 0; original_size <= 5; ++original_size) {
    SCOPED_TRACE(original_size);
    // Original contents are [12345, 12345, ...]
    std::vector<Instance> original_contents(original_size, Instance(12345));

    absl::InlinedVector<Instance, 2> v(original_contents.begin(),
                                       original_contents.end());
    v.assign(3, Instance(123));
    EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(ValueIs(123), ValueIs(123),
                                                 ValueIs(123))));
    EXPECT_LE(v.size(), v.capacity());
  }
}
TEST(CountElemAssign, WithAllocationCopyableInstance) {
  InstanceCountElemAssignWithAllocationTest<CopyableOnlyInstance>();
}
TEST(CountElemAssign, WithAllocationCopyableMovableInstance) {
  InstanceCountElemAssignWithAllocationTest<CopyableMovableInstance>();
}

// Iterator-pair construction: once fitting inline, once forcing a heap
// allocation.
TEST(RangedConstructor, SimpleType) {
  std::vector<int> source_v = {4, 5, 6};
  // First try to fit in inline backing
  absl::InlinedVector<int, 4> v(source_v.begin(), source_v.end());
  EXPECT_EQ(3u, v.size());
  EXPECT_EQ(4u,
            v.capacity());  // Indication that we're still on inlined storage
  EXPECT_EQ(4, v[0]);
  EXPECT_EQ(5, v[1]);
  EXPECT_EQ(6, v[2]);

  // Now, force a re-allocate
  absl::InlinedVector<int, 2> realloc_v(source_v.begin(), source_v.end());
  EXPECT_EQ(3u, realloc_v.size());
  EXPECT_LT(2u, realloc_v.capacity());
  EXPECT_EQ(4, realloc_v[0]);
  EXPECT_EQ(5, realloc_v[1]);
  EXPECT_EQ(6, realloc_v[2]);
}

// Test for ranged constructors using Instance as the element type and
// SourceContainer as the source container type.
template <typename Instance, typename SourceContainer, int inlined_capacity>
void InstanceRangedConstructorTestForContainer() {
  InstanceTracker tracker;
  SourceContainer source_v = {Instance(0), Instance(1)};
  tracker.ResetCopiesMovesSwaps();
  absl::InlinedVector<Instance, inlined_capacity> v(source_v.begin(),
                                                    source_v.end());
  EXPECT_EQ(2u, v.size());
  EXPECT_LT(1u, v.capacity());
  EXPECT_EQ(0, v[0].value());
  EXPECT_EQ(1, v[1].value());
  // Range construction must copy each source element exactly once and never
  // move from the (possibly const) source container.
  EXPECT_EQ(tracker.copies(), 2);
  EXPECT_EQ(tracker.moves(), 0);
}

template <typename Instance, int inlined_capacity>
void InstanceRangedConstructorTestWithCapacity() {
  // Test with const and non-const, random access and non-random-access sources.
  // TODO(bsamwel): Test with an input iterator source.
1478 { 1479 SCOPED_TRACE("std::list"); 1480 InstanceRangedConstructorTestForContainer<Instance, std::list<Instance>, 1481 inlined_capacity>(); 1482 { 1483 SCOPED_TRACE("const std::list"); 1484 InstanceRangedConstructorTestForContainer< 1485 Instance, const std::list<Instance>, inlined_capacity>(); 1486 } 1487 { 1488 SCOPED_TRACE("std::vector"); 1489 InstanceRangedConstructorTestForContainer<Instance, std::vector<Instance>, 1490 inlined_capacity>(); 1491 } 1492 { 1493 SCOPED_TRACE("const std::vector"); 1494 InstanceRangedConstructorTestForContainer< 1495 Instance, const std::vector<Instance>, inlined_capacity>(); 1496 } 1497 } 1498 } 1499 1500 TYPED_TEST_P(InstanceTest, RangedConstructor) { 1501 using Instance = TypeParam; 1502 SCOPED_TRACE("capacity=1"); 1503 InstanceRangedConstructorTestWithCapacity<Instance, 1>(); 1504 SCOPED_TRACE("capacity=2"); 1505 InstanceRangedConstructorTestWithCapacity<Instance, 2>(); 1506 } 1507 1508 TEST(RangedConstructor, ElementsAreConstructed) { 1509 std::vector<std::string> source_v = {"cat", "dog"}; 1510 1511 // Force expansion and re-allocation of v. Ensures that when the vector is 1512 // expanded that new elements are constructed. 1513 absl::InlinedVector<std::string, 1> v(source_v.begin(), source_v.end()); 1514 EXPECT_EQ("cat", v[0]); 1515 EXPECT_EQ("dog", v[1]); 1516 } 1517 1518 TEST(RangedAssign, SimpleType) { 1519 const size_t inlined_capacity = absl::InlinedVector<int, 3>().capacity(); 1520 1521 // Test for all combinations of original sizes (empty and non-empty inline, 1522 // and out of line) and target sizes. 1523 for (size_t original_size = 0; original_size <= 5; ++original_size) { 1524 SCOPED_TRACE(original_size); 1525 // Original contents are [12345, 12345, ...] 1526 std::vector<int> original_contents(original_size, 12345); 1527 1528 for (size_t target_size = 0; target_size <= 5; ++target_size) { 1529 SCOPED_TRACE(target_size); 1530 1531 // New contents are [3, 4, ...] 
std::vector<int> new_contents;
      for (size_t i = 0; i < target_size; ++i) {
        new_contents.push_back(static_cast<int>(i + 3));
      }

      absl::InlinedVector<int, 3> v(original_contents.begin(),
                                    original_contents.end());
      v.assign(new_contents.begin(), new_contents.end());

      EXPECT_EQ(new_contents.size(), v.size());
      EXPECT_LE(new_contents.size(), v.capacity());
      if (target_size <= inlined_capacity &&
          original_size <= inlined_capacity) {
        // Storage should stay inline when target size is small.
        EXPECT_EQ(v.capacity(), inlined_capacity);
      }
      EXPECT_THAT(v, ElementsAreArray(new_contents));
    }
  }
}

// Returns true if lhs and rhs have the same value.
template <typename Instance>
static bool InstanceValuesEqual(const Instance& lhs, const Instance& rhs) {
  return lhs.value() == rhs.value();
}

// Test for ranged assign() using Instance as the element type and
// SourceContainer as the source container type.
template <typename Instance, typename SourceContainer>
void InstanceRangedAssignTestForContainer() {
  // Test for all combinations of original sizes (empty and non-empty inline,
  // and out of line) and target sizes.
  for (size_t original_size = 0; original_size <= 5; ++original_size) {
    SCOPED_TRACE(original_size);
    // Original contents are [12345, 12345, ...]
    std::vector<Instance> original_contents(original_size, Instance(12345));

    for (size_t target_size = 0; target_size <= 5; ++target_size) {
      SCOPED_TRACE(target_size);

      // New contents are [3, 4, ...]
      // Generate data using a non-const container, because SourceContainer
      // itself may be const.
      // TODO(bsamwel): Test with an input iterator.
      std::vector<Instance> new_contents_in;
      for (size_t i = 0; i < target_size; ++i) {
        new_contents_in.push_back(Instance(static_cast<int>(i) + 3));
      }
      SourceContainer new_contents(new_contents_in.begin(),
                                   new_contents_in.end());

      absl::InlinedVector<Instance, 3> v(original_contents.begin(),
                                         original_contents.end());
      v.assign(new_contents.begin(), new_contents.end());

      EXPECT_EQ(new_contents.size(), v.size());
      EXPECT_LE(new_contents.size(), v.capacity());
      if (target_size <= 3 && original_size <= 3) {
        // Storage should stay inline when target size is small.
        EXPECT_EQ(3u, v.capacity());
      }
      EXPECT_TRUE(std::equal(v.begin(), v.end(), new_contents.begin(),
                             InstanceValuesEqual<Instance>));
    }
  }
}

TYPED_TEST_P(InstanceTest, RangedAssign) {
  using Instance = TypeParam;
  // Test with const and non-const, random access and non-random-access sources.
  // TODO(bsamwel): Test with an input iterator source.
  SCOPED_TRACE("std::list");
  InstanceRangedAssignTestForContainer<Instance, std::list<Instance>>();
  SCOPED_TRACE("const std::list");
  InstanceRangedAssignTestForContainer<Instance, const std::list<Instance>>();
  SCOPED_TRACE("std::vector");
  InstanceRangedAssignTestForContainer<Instance, std::vector<Instance>>();
  SCOPED_TRACE("const std::vector");
  InstanceRangedAssignTestForContainer<Instance, const std::vector<Instance>>();
}

// Initializer-list construction that fits the inline capacity.
TEST(InitializerListConstructor, SimpleTypeWithInlineBacking) {
  EXPECT_THAT((absl::InlinedVector<int, 4>{4, 5, 6}),
              AllOf(SizeIs(3u), CapacityIs(4u), ElementsAre(4, 5, 6)));
}

// Initializer-list construction that must spill to allocated storage.
TEST(InitializerListConstructor, SimpleTypeWithReallocationRequired) {
  EXPECT_THAT((absl::InlinedVector<int, 2>{4, 5, 6}),
              AllOf(SizeIs(3u), CapacityIs(Gt(2u)), ElementsAre(4, 5, 6)));
}

// Braced lists may mix types convertible to the element type.
TEST(InitializerListConstructor, DisparateTypesInList) {
  EXPECT_THAT((absl::InlinedVector<int, 2>{-7, 8ULL}), ElementsAre(-7, 8));

  EXPECT_THAT((absl::InlinedVector<std::string, 2>{"foo", std::string("bar")}),
              ElementsAre("foo", "bar"));
}

TEST(InitializerListConstructor, ComplexTypeWithInlineBacking) {
  const size_t inlined_capacity =
      absl::InlinedVector<CopyableMovableInstance, 1>().capacity();
  EXPECT_THAT(
      (absl::InlinedVector<CopyableMovableInstance, 1>{
          CopyableMovableInstance(0)}),
      AllOf(SizeIs(1u), CapacityIs(inlined_capacity), ElementsAre(ValueIs(0))));
}

TEST(InitializerListConstructor, ComplexTypeWithReallocationRequired) {
  EXPECT_THAT((absl::InlinedVector<CopyableMovableInstance, 1>{
                  CopyableMovableInstance(0), CopyableMovableInstance(1)}),
              AllOf(SizeIs(2u), CapacityIs(Gt(1u)),
                    ElementsAre(ValueIs(0), ValueIs(1))));
}

// assign({...}) and operator={...} with a single element must preserve the
// existing capacity for every starting size.
TEST(InitializerListAssign, SimpleTypeFitsInlineBacking) {
  for (size_t original_size = 0; original_size <= 4; ++original_size) {
    SCOPED_TRACE(original_size);

    absl::InlinedVector<int, 2> v1(original_size, 12345);
    const size_t original_capacity_v1 = v1.capacity();
    v1.assign({3});
    EXPECT_THAT(v1, AllOf(SizeIs(1u), CapacityIs(original_capacity_v1),
                          ElementsAre(3)));

    absl::InlinedVector<int, 2> v2(original_size, 12345);
    const size_t original_capacity_v2 = v2.capacity();
    v2 = {3};
    EXPECT_THAT(v2, AllOf(SizeIs(1u), CapacityIs(original_capacity_v2),
                          ElementsAre(3)));
  }
}

// Three-element list assignment onto a capacity-2 vector must allocate.
TEST(InitializerListAssign, SimpleTypeDoesNotFitInlineBacking) {
  for (size_t original_size = 0; original_size <= 4; ++original_size) {
    SCOPED_TRACE(original_size);
    absl::InlinedVector<int, 2> v1(original_size, 12345);
    v1.assign({3, 4, 5});
    EXPECT_THAT(v1, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
    EXPECT_LE(3u, v1.capacity());

    absl::InlinedVector<int, 2> v2(original_size, 12345);
    v2 = {3, 4, 5};
    EXPECT_THAT(v2, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
    EXPECT_LE(3u, v2.capacity());
  }
}

TEST(InitializerListAssign, DisparateTypesInList) {
  absl::InlinedVector<int, 2> v_int1;
  v_int1.assign({-7, 8ULL});
  EXPECT_THAT(v_int1, ElementsAre(-7, 8));

  absl::InlinedVector<int, 2> v_int2;
  v_int2 = {-7, 8ULL};
  EXPECT_THAT(v_int2, ElementsAre(-7, 8));

  absl::InlinedVector<std::string, 2> v_string1;
  v_string1.assign({"foo", std::string("bar")});
  EXPECT_THAT(v_string1, ElementsAre("foo", "bar"));

  absl::InlinedVector<std::string, 2> v_string2;
  v_string2 = {"foo", std::string("bar")};
  EXPECT_THAT(v_string2, ElementsAre("foo", "bar"));
}

// Initializer-list assign with instrumented element types.
TYPED_TEST_P(InstanceTest, InitializerListAssign) {
  using Instance = TypeParam;
  for (size_t original_size = 0; original_size <= 4; ++original_size) {
    SCOPED_TRACE(original_size);
    absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
    const
size_t original_capacity = v.capacity();
    v.assign({Instance(3)});
    EXPECT_THAT(v, AllOf(SizeIs(1u), CapacityIs(original_capacity),
                         ElementsAre(ValueIs(3))));
  }
  for (size_t original_size = 0; original_size <= 4; ++original_size) {
    SCOPED_TRACE(original_size);
    absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
    v.assign({Instance(3), Instance(4), Instance(5)});
    EXPECT_THAT(
        v, AllOf(SizeIs(3u), ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5))));
    EXPECT_LE(3u, v.capacity());
  }
}

REGISTER_TYPED_TEST_SUITE_P(InstanceTest, Swap, CountConstructorsDestructors,
                            CountConstructorsDestructorsOnCopyConstruction,
                            CountConstructorsDestructorsOnMoveConstruction,
                            CountConstructorsDestructorsOnAssignment,
                            CountConstructorsDestructorsOnMoveAssignment,
                            CountElemAssignInlineBacking, RangedConstructor,
                            RangedAssign, InitializerListAssign);

using InstanceTypes =
    ::testing::Types<CopyableOnlyInstance, CopyableMovableInstance>;
INSTANTIATE_TYPED_TEST_SUITE_P(InstanceTestOnTypes, InstanceTest,
                               InstanceTypes);

// Compile-time smoke test for a vector of a type declared elsewhere in this
// file.
TEST(DynamicVec, DynamicVecCompiles) {
  DynamicVec v;
  (void)v;
}

TEST(DynamicVec, CreateNonEmptyDynamicVec) {
  DynamicVec v(1);
  EXPECT_EQ(v.size(), 1u);
}

TEST(DynamicVec, EmplaceBack) {
  DynamicVec v;
  v.emplace_back(Dynamic{});
  EXPECT_EQ(v.size(), 1u);
}

TEST(DynamicVec, EmplaceBackAfterHeapAllocation) {
  DynamicVec v;
  v.reserve(10);
  v.emplace_back(Dynamic{});
  EXPECT_EQ(v.size(), 1u);
}

TEST(DynamicVec, EmptyIteratorComparison) {
  DynamicVec v;
  EXPECT_EQ(v.begin(), v.end());
  EXPECT_EQ(v.cbegin(), v.cend());
}

// Every constructor overload must accept (and compile with) a custom
// allocator argument.
TEST(AllocatorSupportTest, Constructors) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
  int64_t allocated = 0;
  MyAlloc alloc(&allocated);
  { AllocVec ABSL_ATTRIBUTE_UNUSED v; }
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(alloc); }
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc); }
  { AllocVec ABSL_ATTRIBUTE_UNUSED v({1, 2, 3}, alloc); }

  AllocVec v2;
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(v2, alloc); }
  { AllocVec ABSL_ATTRIBUTE_UNUSED v(std::move(v2), alloc); }
}

// Tracks exact allocation byte counts and element counts through construct,
// copy, move, resize, and shrink_to_fit.
TEST(AllocatorSupportTest, CountAllocations) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
  int64_t bytes_allocated = 0;
  int64_t instance_count = 0;
  MyAlloc alloc(&bytes_allocated, &instance_count);
  {
    // Four elements fit inline: no heap bytes, four live ints.
    AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + 4, alloc);
    EXPECT_THAT(bytes_allocated, Eq(0));
    EXPECT_THAT(instance_count, Eq(4));
  }
  EXPECT_THAT(bytes_allocated, Eq(0));
  EXPECT_THAT(instance_count, Eq(0));
  {
    // Eight elements exceed the inline capacity: heap bytes are charged.
    AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc);
    EXPECT_THAT(bytes_allocated,
                Eq(static_cast<int64_t>(v.size() * sizeof(int))));
    EXPECT_THAT(instance_count, Eq(static_cast<int64_t>(v.size())));
  }
  EXPECT_THAT(bytes_allocated, Eq(0));
  EXPECT_THAT(instance_count, Eq(0));
  {
    AllocVec v(4, 1, alloc);
    EXPECT_THAT(bytes_allocated, Eq(0));
    EXPECT_THAT(instance_count, Eq(4));

    // Copy/move of an inline vector must not allocate from the new allocator.
    int64_t bytes_allocated2 = 0;
    MyAlloc alloc2(&bytes_allocated2);
    ABSL_ATTRIBUTE_UNUSED AllocVec v2(v, alloc2);
    EXPECT_THAT(bytes_allocated2, Eq(0));

    int64_t bytes_allocated3 = 0;
    MyAlloc alloc3(&bytes_allocated3);
    ABSL_ATTRIBUTE_UNUSED AllocVec v3(std::move(v), alloc3);
    EXPECT_THAT(bytes_allocated3, Eq(0));
  }
  EXPECT_THAT(bytes_allocated, Eq(0));
  EXPECT_THAT(instance_count, Eq(0));
  {
    AllocVec v(8, 2, alloc);
    EXPECT_THAT(bytes_allocated,
                Eq(static_cast<int64_t>(v.size() * sizeof(int))));
    EXPECT_THAT(instance_count, Eq(static_cast<int64_t>(v.size())));

    // Copy/move of an allocated vector with an unequal allocator must
    // allocate from the new allocator.
    int64_t bytes_allocated2 = 0;
    MyAlloc alloc2(&bytes_allocated2);
    AllocVec v2(v, alloc2);
    EXPECT_THAT(bytes_allocated2,
                Eq(static_cast<int64_t>(v2.size() * sizeof(int))));

    int64_t bytes_allocated3 = 0;
    MyAlloc alloc3(&bytes_allocated3);
    AllocVec v3(std::move(v), alloc3);
    EXPECT_THAT(bytes_allocated3,
                Eq(static_cast<int64_t>(v3.size() * sizeof(int))));
  }
  EXPECT_EQ(bytes_allocated, 0);
  EXPECT_EQ(instance_count, 0);
  {
    // Test shrink_to_fit deallocations.
    AllocVec v(8, 2, alloc);
    EXPECT_EQ(bytes_allocated, static_cast<int64_t>(8 * sizeof(int)));
    EXPECT_EQ(instance_count, 8);
    v.resize(5);
    EXPECT_EQ(bytes_allocated, static_cast<int64_t>(8 * sizeof(int)));
    EXPECT_EQ(instance_count, 5);
    v.shrink_to_fit();
    EXPECT_EQ(bytes_allocated, static_cast<int64_t>(5 * sizeof(int)));
    EXPECT_EQ(instance_count, 5);
    v.resize(4);
    EXPECT_EQ(bytes_allocated, static_cast<int64_t>(5 * sizeof(int)));
    EXPECT_EQ(instance_count, 4);
    v.shrink_to_fit();
    // Four elements fit back inline, so the heap block is released.
    EXPECT_EQ(bytes_allocated, 0);
    EXPECT_EQ(instance_count, 4);
  }
  EXPECT_EQ(instance_count, 0);
}

// Swapping two heap-backed vectors exchanges the allocations (and therefore
// which counter each vector's storage is charged to).
TEST(AllocatorSupportTest, SwapBothAllocated) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  int64_t allocated1 = 0;
  int64_t allocated2 = 0;
  {
    const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
    const int ia2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    MyAlloc a1(&allocated1);
    MyAlloc a2(&allocated2);
    AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
    AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
    EXPECT_LT(v1.capacity(), v2.capacity());
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    v1.swap(v2);
    EXPECT_THAT(v1, ElementsAreArray(ia2));
    EXPECT_THAT(v2, ElementsAreArray(ia1));
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
  }
  EXPECT_THAT(allocated1, 0);
  EXPECT_THAT(allocated2, 0);
}

// Swapping an allocated vector with an inline one: the allocation (and its
// allocator) follows the data.
TEST(AllocatorSupportTest, SwapOneAllocated) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  int64_t allocated1 = 0;
  int64_t allocated2 = 0;
  {
    const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
    const int ia2[] = {0, 1, 2, 3};
    MyAlloc a1(&allocated1);
    MyAlloc a2(&allocated2);
    AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
    AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2, Eq(0));
    v1.swap(v2);
    EXPECT_THAT(v1, ElementsAreArray(ia2));
    EXPECT_THAT(v2, ElementsAreArray(ia1));
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2, Eq(0));
    EXPECT_TRUE(v2.get_allocator() == a1);
    EXPECT_TRUE(v1.get_allocator() == a2);
  }
  EXPECT_THAT(allocated1, 0);
  EXPECT_THAT(allocated2, 0);
}

// scoped_allocator_adaptor must propagate the allocator to elements even when
// the vector itself stays on inline storage.
TEST(AllocatorSupportTest, ScopedAllocatorWorksInlined) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  int64_t total_allocated_byte_count = 0;

  AllocVec inlined_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Called only once to remain inlined
  inlined_case.emplace_back();

  int64_t
absl_responsible_for_count = total_allocated_byte_count;

// MSVC's allocator preemptively allocates in debug mode
#if !defined(_MSC_VER)
  EXPECT_EQ(absl_responsible_for_count, 0);
#endif  // !defined(_MSC_VER)

  // The element's own allocation must flow through the scoped allocator.
  inlined_case[0].emplace_back();
  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);

  inlined_case.clear();
  inlined_case.shrink_to_fit();
  EXPECT_EQ(total_allocated_byte_count, 0);
}

// Same as above, but with the vector forced onto allocated storage.
TEST(AllocatorSupportTest, ScopedAllocatorWorksAllocated) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  int64_t total_allocated_byte_count = 0;

  AllocVec allocated_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Called twice to force into being allocated
  allocated_case.emplace_back();
  allocated_case.emplace_back();

  int64_t absl_responsible_for_count = total_allocated_byte_count;
  EXPECT_GT(absl_responsible_for_count, 0);

  allocated_case[1].emplace_back();
  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);

  allocated_case.clear();
  allocated_case.shrink_to_fit();
  EXPECT_EQ(total_allocated_byte_count, 0);
}

// The (size, allocator) constructor must only allocate when the requested
// size exceeds the inline capacity.
TEST(AllocatorSupportTest, SizeAllocConstructor) {
  constexpr size_t inlined_size = 4;
  using Alloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, inlined_size, Alloc>;

  {
    auto len = inlined_size / 2;
    int64_t allocated = 0;
    auto v = AllocVec(len, Alloc(&allocated));

    // Inline storage used; allocator should not be invoked
    EXPECT_THAT(allocated, Eq(0));
    EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
  }

  {
    auto len = inlined_size * 2;
    int64_t allocated = 0;
    auto v = AllocVec(len, Alloc(&allocated));

    // Out of line storage used; allocation of 8 elements expected
    EXPECT_THAT(allocated, Eq(static_cast<int64_t>(len * sizeof(int))));
    EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
  }
}

// InlinedVector must work with a minimal allocator that provides only
// value_type, allocate, and deallocate (everything else via allocator_traits).
TEST(InlinedVectorTest, MinimumAllocatorCompilesUsingTraits) {
  using T = int;
  using A = std::allocator<T>;
  using ATraits = absl::allocator_traits<A>;

  struct MinimumAllocator {
    using value_type = T;

    value_type* allocate(size_t n) {
      A a;
      return ATraits::allocate(a, n);
    }

    void deallocate(value_type* p, size_t n) {
      A a;
      ATraits::deallocate(a, p, n);
    }
  };

  absl::InlinedVector<T, 1, MinimumAllocator> vec;
  vec.emplace_back();
  vec.resize(0);
}

// Hash equality must depend only on contents, not on inline vs. allocated
// storage.
TEST(InlinedVectorTest, AbslHashValueWorks) {
  using V = absl::InlinedVector<int, 4>;
  std::vector<V> cases;

  // Generate a variety of vectors some of these are small enough for the inline
  // space but are stored out of line.
  for (size_t i = 0; i < 10; ++i) {
    V v;
    for (int j = 0; j < static_cast<int>(i); ++j) {
      v.push_back(j);
    }
    cases.push_back(v);
    v.resize(i % 4);
    cases.push_back(v);
  }

  EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(cases));
}

// Move-constructible but NOT move-assignable: forces InlinedVector's
// assignment paths to destroy + reconstruct instead of assigning.
class MoveConstructibleOnlyInstance
    : public absl::test_internal::BaseCountedInstance {
 public:
  explicit MoveConstructibleOnlyInstance(int x) : BaseCountedInstance(x) {}
  MoveConstructibleOnlyInstance(MoveConstructibleOnlyInstance&& other) =
      default;
  MoveConstructibleOnlyInstance& operator=(
      MoveConstructibleOnlyInstance&& other) = delete;
};

// Pointwise matcher: element.value() equals the expected int.
MATCHER(HasValue, "") {
  return ::testing::get<0>(arg).value() == ::testing::get<1>(arg);
}

// Moving an allocated source into an inline target should hand over the heap
// block with zero per-element moves.
TEST(NonAssignableMoveAssignmentTest, AllocatedToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

  inlined = std::move(allocated);
  // passed ownership of the allocated storage
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
}

// Moving an inline source into an allocated target moves each element once.
TEST(NonAssignableMoveAssignmentTest, InlineToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

  allocated = std::move(inlined);
  // Moved elements
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(allocated, Pointwise(HasValue(), {1}));
}

TEST(NonAssignableMoveAssignmentTest, InlineToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined_a;
  inlined_a.emplace_back(1);
  absl::InlinedVector<X, 2> inlined_b;
  inlined_b.emplace_back(1);
  tracker.ResetCopiesMovesSwaps();

  inlined_a = std::move(inlined_b);
  // Moved elements
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(inlined_a, Pointwise(HasValue(), {1}));
}

TEST(NonAssignableMoveAssignmentTest, AllocatedToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> allocated_a;
  allocated_a.emplace_back(1);
  allocated_a.emplace_back(2);
  allocated_a.emplace_back(3);
  absl::InlinedVector<X, 2> allocated_b;
  allocated_b.emplace_back(4);
  allocated_b.emplace_back(5);
  allocated_b.emplace_back(6);
  allocated_b.emplace_back(7);
  tracker.ResetCopiesMovesSwaps();

  allocated_a = std::move(allocated_b);
  // passed ownership of the allocated storage
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 4);

  EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
}

// Self-move-assignment must leave the vector in a valid state.
TEST(NonAssignableMoveAssignmentTest, AssignThis) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> v;
  v.emplace_back(1);
  v.emplace_back(2);
  v.emplace_back(3);

  tracker.ResetCopiesMovesSwaps();

  // Obfuscated in order to pass -Wself-move.
2137 v = std::move(*std::addressof(v)); 2138 // nothing happens 2139 EXPECT_EQ(tracker.moves(), 0); 2140 EXPECT_EQ(tracker.live_instances(), 3); 2141 2142 EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3})); 2143 } 2144 2145 class NonSwappableInstance : public absl::test_internal::BaseCountedInstance { 2146 public: 2147 explicit NonSwappableInstance(int x) : BaseCountedInstance(x) {} 2148 NonSwappableInstance(const NonSwappableInstance& other) = default; 2149 NonSwappableInstance& operator=(const NonSwappableInstance& other) = default; 2150 NonSwappableInstance(NonSwappableInstance&& other) = default; 2151 NonSwappableInstance& operator=(NonSwappableInstance&& other) = default; 2152 }; 2153 2154 void swap(NonSwappableInstance&, NonSwappableInstance&) = delete; 2155 2156 TEST(NonSwappableSwapTest, InlineAndAllocatedTransferStorageAndMove) { 2157 using X = NonSwappableInstance; 2158 InstanceTracker tracker; 2159 absl::InlinedVector<X, 2> inlined; 2160 inlined.emplace_back(1); 2161 absl::InlinedVector<X, 2> allocated; 2162 allocated.emplace_back(1); 2163 allocated.emplace_back(2); 2164 allocated.emplace_back(3); 2165 tracker.ResetCopiesMovesSwaps(); 2166 2167 inlined.swap(allocated); 2168 EXPECT_EQ(tracker.moves(), 1); 2169 EXPECT_EQ(tracker.live_instances(), 4); 2170 2171 EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3})); 2172 } 2173 2174 TEST(NonSwappableSwapTest, InlineAndInlineMoveIndividualElements) { 2175 using X = NonSwappableInstance; 2176 InstanceTracker tracker; 2177 absl::InlinedVector<X, 2> inlined_a; 2178 inlined_a.emplace_back(1); 2179 absl::InlinedVector<X, 2> inlined_b; 2180 inlined_b.emplace_back(2); 2181 tracker.ResetCopiesMovesSwaps(); 2182 2183 inlined_a.swap(inlined_b); 2184 EXPECT_EQ(tracker.moves(), 3); 2185 EXPECT_EQ(tracker.live_instances(), 2); 2186 2187 EXPECT_THAT(inlined_a, Pointwise(HasValue(), {2})); 2188 EXPECT_THAT(inlined_b, Pointwise(HasValue(), {1})); 2189 } 2190 2191 TEST(NonSwappableSwapTest, 
AllocatedAndAllocatedOnlyTransferStorage) { 2192 using X = NonSwappableInstance; 2193 InstanceTracker tracker; 2194 absl::InlinedVector<X, 2> allocated_a; 2195 allocated_a.emplace_back(1); 2196 allocated_a.emplace_back(2); 2197 allocated_a.emplace_back(3); 2198 absl::InlinedVector<X, 2> allocated_b; 2199 allocated_b.emplace_back(4); 2200 allocated_b.emplace_back(5); 2201 allocated_b.emplace_back(6); 2202 allocated_b.emplace_back(7); 2203 tracker.ResetCopiesMovesSwaps(); 2204 2205 allocated_a.swap(allocated_b); 2206 EXPECT_EQ(tracker.moves(), 0); 2207 EXPECT_EQ(tracker.live_instances(), 7); 2208 2209 EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7})); 2210 EXPECT_THAT(allocated_b, Pointwise(HasValue(), {1, 2, 3})); 2211 } 2212 2213 TEST(NonSwappableSwapTest, SwapThis) { 2214 using X = NonSwappableInstance; 2215 InstanceTracker tracker; 2216 absl::InlinedVector<X, 2> v; 2217 v.emplace_back(1); 2218 v.emplace_back(2); 2219 v.emplace_back(3); 2220 2221 tracker.ResetCopiesMovesSwaps(); 2222 2223 v.swap(v); 2224 EXPECT_EQ(tracker.moves(), 0); 2225 EXPECT_EQ(tracker.live_instances(), 3); 2226 2227 EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3})); 2228 } 2229 2230 template <size_t N> 2231 using CharVec = absl::InlinedVector<char, N>; 2232 2233 // Warning: This struct "simulates" the type `InlinedVector::Storage::Allocated` 2234 // to make reasonable expectations for inlined storage capacity optimization. If 2235 // implementation changes `Allocated`, then `MySpan` and tests that use it need 2236 // to be updated accordingly. 2237 template <typename T> 2238 struct MySpan { 2239 T* data; 2240 size_t size; 2241 }; 2242 2243 TEST(StorageTest, InlinedCapacityAutoIncrease) { 2244 // The requested capacity is auto increased to `sizeof(MySpan<char>)`. 
2245 EXPECT_GT(CharVec<1>().capacity(), 1); 2246 EXPECT_EQ(CharVec<1>().capacity(), sizeof(MySpan<char>)); 2247 EXPECT_EQ(CharVec<1>().capacity(), CharVec<2>().capacity()); 2248 EXPECT_EQ(sizeof(CharVec<1>), sizeof(CharVec<2>)); 2249 2250 // The requested capacity is auto increased to 2251 // `sizeof(MySpan<int>) / sizeof(int)`. 2252 EXPECT_GT((absl::InlinedVector<int, 1>().capacity()), 1); 2253 EXPECT_EQ((absl::InlinedVector<int, 1>().capacity()), 2254 sizeof(MySpan<int>) / sizeof(int)); 2255 } 2256 2257 } // anonymous namespace