circular_deque.h (38157B)
1 // Copyright 2017 The Chromium Authors 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #ifndef BASE_CONTAINERS_CIRCULAR_DEQUE_H_ 6 #define BASE_CONTAINERS_CIRCULAR_DEQUE_H_ 7 8 #include <algorithm> 9 #include <cstddef> 10 #include <iterator> 11 #include <type_traits> 12 #include <utility> 13 14 #include "base/check.h" 15 #include "base/containers/vector_buffer.h" 16 #include "base/dcheck_is_on.h" 17 #include "base/memory/raw_ptr_exclusion.h" 18 #include "base/ranges/algorithm.h" 19 #include "base/template_util.h" 20 21 #if DCHECK_IS_ON() 22 #include <ostream> 23 #endif 24 25 // base::circular_deque is similar to std::deque. Unlike std::deque, the 26 // storage is provided in a flat circular buffer conceptually similar to a 27 // vector. The beginning and end will wrap around as necessary so that 28 // pushes and pops will be constant time as long as a capacity expansion is 29 // not required. 30 // 31 // The API should be identical to std::deque with the following differences: 32 // 33 // - ITERATORS ARE NOT STABLE. Mutating the container will invalidate all 34 // iterators. 35 // 36 // - Insertions may resize the vector and so are not constant time (std::deque 37 // guarantees constant time for insertions at the ends). 38 // 39 // - Container-wide comparisons are not implemented. If you want to compare 40 // two containers, use an algorithm so the expensive iteration is explicit. 41 // 42 // If you want a similar container with only a queue API, use base::queue in 43 // base/containers/queue.h. 
44 // 45 // Constructors: 46 // circular_deque(); 47 // circular_deque(size_t count); 48 // circular_deque(size_t count, const T& value); 49 // circular_deque(InputIterator first, InputIterator last); 50 // circular_deque(const circular_deque&); 51 // circular_deque(circular_deque&&); 52 // circular_deque(std::initializer_list<value_type>); 53 // 54 // Assignment functions: 55 // circular_deque& operator=(const circular_deque&); 56 // circular_deque& operator=(circular_deque&&); 57 // circular_deque& operator=(std::initializer_list<T>); 58 // void assign(size_t count, const T& value); 59 // void assign(InputIterator first, InputIterator last); 60 // void assign(std::initializer_list<T> value); 61 // 62 // Random accessors: 63 // T& at(size_t); 64 // const T& at(size_t) const; 65 // T& operator[](size_t); 66 // const T& operator[](size_t) const; 67 // 68 // End accessors: 69 // T& front(); 70 // const T& front() const; 71 // T& back(); 72 // const T& back() const; 73 // 74 // Iterator functions: 75 // iterator begin(); 76 // const_iterator begin() const; 77 // const_iterator cbegin() const; 78 // iterator end(); 79 // const_iterator end() const; 80 // const_iterator cend() const; 81 // reverse_iterator rbegin(); 82 // const_reverse_iterator rbegin() const; 83 // const_reverse_iterator crbegin() const; 84 // reverse_iterator rend(); 85 // const_reverse_iterator rend() const; 86 // const_reverse_iterator crend() const; 87 // 88 // Memory management: 89 // void reserve(size_t); // SEE IMPLEMENTATION FOR SOME GOTCHAS. 
90 // size_t capacity() const; 91 // void shrink_to_fit(); 92 // 93 // Size management: 94 // void clear(); 95 // bool empty() const; 96 // size_t size() const; 97 // void resize(size_t); 98 // void resize(size_t count, const T& value); 99 // 100 // Positional insert and erase: 101 // void insert(const_iterator pos, size_type count, const T& value); 102 // void insert(const_iterator pos, 103 // InputIterator first, InputIterator last); 104 // iterator insert(const_iterator pos, const T& value); 105 // iterator insert(const_iterator pos, T&& value); 106 // iterator emplace(const_iterator pos, Args&&... args); 107 // iterator erase(const_iterator pos); 108 // iterator erase(const_iterator first, const_iterator last); 109 // 110 // End insert and erase: 111 // void push_front(const T&); 112 // void push_front(T&&); 113 // void push_back(const T&); 114 // void push_back(T&&); 115 // T& emplace_front(Args&&...); 116 // T& emplace_back(Args&&...); 117 // void pop_front(); 118 // void pop_back(); 119 // 120 // General: 121 // void swap(circular_deque&); 122 123 namespace base { 124 125 template <class T> 126 class circular_deque; 127 128 namespace internal { 129 130 // Start allocating nonempty buffers with this many entries. This is the 131 // external capacity so the internal buffer will be one larger (= 4) which is 132 // more even for the allocator. See the descriptions of internal vs. external 133 // capacity on the comment above the buffer_ variable below. 
constexpr size_t kCircularBufferInitialCapacity = 3;

// Const random-access iterator over a circular_deque<T>.
//
// Iterators are NOT stable: any mutation of the parent container invalidates
// them. In DCHECK builds this is enforced by stamping each iterator with the
// container's generation counter at creation time and checking it on every
// dereference/comparison.
template <typename T>
class circular_deque_const_iterator {
 public:
  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = const T*;
  using reference = const T&;
  using iterator_category = std::random_access_iterator_tag;

  // A default-constructed iterator is singular (no parent container).
  circular_deque_const_iterator() : parent_deque_(nullptr), index_(0) {
#if DCHECK_IS_ON()
    created_generation_ = 0;
#endif  // DCHECK_IS_ON()
  }

  // Dereferencing.
  const T& operator*() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return parent_deque_->buffer_[index_];
  }
  const T* operator->() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return &parent_deque_->buffer_[index_];
  }
  const value_type& operator[](difference_type i) const { return *(*this + i); }

  // Increment and decrement.
  circular_deque_const_iterator& operator++() {
    Increment();
    return *this;
  }
  circular_deque_const_iterator operator++(int) {
    circular_deque_const_iterator ret = *this;
    Increment();
    return ret;
  }
  circular_deque_const_iterator& operator--() {
    Decrement();
    return *this;
  }
  circular_deque_const_iterator operator--(int) {
    circular_deque_const_iterator ret = *this;
    Decrement();
    return ret;
  }

  // Random access mutation.
  friend circular_deque_const_iterator operator+(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_const_iterator& operator+=(difference_type offset) {
    Add(offset);
    return *this;
  }
  friend circular_deque_const_iterator operator-(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_const_iterator& operator-=(difference_type offset) {
    Add(-offset);
    return *this;
  }

  friend std::ptrdiff_t operator-(const circular_deque_const_iterator& lhs,
                                  const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return static_cast<std::ptrdiff_t>(lhs.OffsetFromBegin() -
                                       rhs.OffsetFromBegin());
  }

  // Comparisons. Ordering comparisons are done on logical offsets from
  // begin() (not raw buffer indices) so they are correct when the deque's
  // contents wrap around the end of the buffer.
  friend bool operator==(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.index_ == rhs.index_;
  }
  friend bool operator!=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs == rhs);
  }
  friend bool operator<(const circular_deque_const_iterator& lhs,
                        const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() < rhs.OffsetFromBegin();
  }
  friend bool operator<=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs > rhs);
  }
  friend bool operator>(const circular_deque_const_iterator& lhs,
                        const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() > rhs.OffsetFromBegin();
  }
  friend bool operator>=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs < rhs);
  }

 protected:
  friend class circular_deque<T>;

  circular_deque_const_iterator(const circular_deque<T>* parent, size_t index)
      : parent_deque_(parent), index_(index) {
#if DCHECK_IS_ON()
    created_generation_ = parent->generation_;
#endif  // DCHECK_IS_ON()
  }

  // Returns the offset from the beginning index of the buffer to the current
  // item.
  size_t OffsetFromBegin() const {
    if (index_ >= parent_deque_->begin_)
      return index_ - parent_deque_->begin_;  // On the same side as begin.
    return parent_deque_->buffer_.capacity() - parent_deque_->begin_ + index_;
  }

  // Most uses will be ++ and -- so use a simplified implementation.
  void Increment() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    index_++;
    if (index_ == parent_deque_->buffer_.capacity())
      index_ = 0;
  }
  void Decrement() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndexOrEnd(index_);
    if (index_ == 0)
      index_ = parent_deque_->buffer_.capacity() - 1;
    else
      index_--;
  }
  void Add(difference_type delta) {
    CheckUnstableUsage();
#if DCHECK_IS_ON()
    if (delta <= 0)
      parent_deque_->CheckValidIndexOrEnd(index_);
    else
      parent_deque_->CheckValidIndex(index_);
#endif
    // It should be valid to add 0 to any iterator, even if the container is
    // empty and the iterator points to end(). The modulo below will divide
    // by 0 if the buffer capacity is empty, so it's important to check for
    // this case explicitly.
    if (delta == 0)
      return;

    difference_type new_offset = OffsetFromBegin() + delta;
    DCHECK(new_offset >= 0 &&
           new_offset <= static_cast<difference_type>(parent_deque_->size()));
    index_ = (new_offset + parent_deque_->begin_) %
             parent_deque_->buffer_.capacity();
  }

#if DCHECK_IS_ON()
  void CheckUnstableUsage() const {
    DCHECK(parent_deque_);
    // Since circular_deque doesn't guarantee stability, any attempt to
    // dereference this iterator after a mutation (i.e. the generation doesn't
    // match the original) in the container is illegal.
    DCHECK(created_generation_ == parent_deque_->generation_)
        << "circular_deque iterator dereferenced after mutation.";
  }
  void CheckComparable(const circular_deque_const_iterator& other) const {
    DCHECK(parent_deque_ == other.parent_deque_);
    // Since circular_deque doesn't guarantee stability, two iterators that
    // are compared must have been generated without mutating the container.
    // If this fires, the container was mutated between generating the two
    // iterators being compared.
    DCHECK(created_generation_ == other.created_generation_);
  }
#else
  inline void CheckUnstableUsage() const {}
  inline void CheckComparable(const circular_deque_const_iterator&) const {}
#endif  // DCHECK_IS_ON()

  // `parent_deque_` is not a raw_ptr<...> for performance reasons: Usually
  // on-stack pointer, pointing back to the collection being iterated, owned by
  // object that iterates over it. Additionally this is supported by the
  // analysis of sampling profiler data and tab_search:top100:2020.
  RAW_PTR_EXCLUSION const circular_deque<T>* parent_deque_;

  // Physical index into parent_deque_->buffer_ (NOT the logical offset from
  // begin() that operator[]/at() take).
  size_t index_;

#if DCHECK_IS_ON()
  // The generation of the parent deque when this iterator was created. The
  // container will update the generation for every modification so we can
  // test if the container was modified by comparing them.
  uint64_t created_generation_;
#endif  // DCHECK_IS_ON()
};

// Mutable iterator. Shares all bookkeeping and validity checking with the
// const iterator; dereference operators const_cast the base's results, which
// is safe because a mutable iterator can only be obtained from a non-const
// deque.
template <typename T>
class circular_deque_iterator : public circular_deque_const_iterator<T> {
  using base = circular_deque_const_iterator<T>;

 public:
  friend class circular_deque<T>;

  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = T*;
  using reference = T&;
  using iterator_category = std::random_access_iterator_tag;

  // Expose the base class' constructor.
  circular_deque_iterator() : circular_deque_const_iterator<T>() {}

  // Dereferencing.
  T& operator*() const { return const_cast<T&>(base::operator*()); }
  T* operator->() const { return const_cast<T*>(base::operator->()); }
  T& operator[](difference_type i) {
    return const_cast<T&>(base::operator[](i));
  }

  // Random access mutation.
  friend circular_deque_iterator operator+(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_iterator& operator+=(difference_type offset) {
    base::Add(offset);
    return *this;
  }
  friend circular_deque_iterator operator-(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_iterator& operator-=(difference_type offset) {
    base::Add(-offset);
    return *this;
  }

  // Increment and decrement.
  circular_deque_iterator& operator++() {
    base::Increment();
    return *this;
  }
  circular_deque_iterator operator++(int) {
    circular_deque_iterator ret = *this;
    base::Increment();
    return ret;
  }
  circular_deque_iterator& operator--() {
    base::Decrement();
    return *this;
  }
  circular_deque_iterator operator--(int) {
    circular_deque_iterator ret = *this;
    base::Decrement();
    return ret;
  }

 private:
  circular_deque_iterator(const circular_deque<T>* parent, size_t index)
      : circular_deque_const_iterator<T>(parent, index) {}
};

}  // namespace internal

template <typename T>
class circular_deque {
 private:
  using VectorBuffer = internal::VectorBuffer<T>;

 public:
  using value_type = T;
  using size_type = std::size_t;
  using difference_type = std::ptrdiff_t;
  using reference = value_type&;
  using const_reference = const value_type&;
  using pointer = value_type*;
  using const_pointer = const value_type*;

  using iterator = internal::circular_deque_iterator<T>;
  using const_iterator = internal::circular_deque_const_iterator<T>;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;

  // ---------------------------------------------------------------------------
  // Constructor

  constexpr circular_deque() = default;

  // Constructs with |count| copies of |value| or default constructed version.
  explicit circular_deque(size_type count) { resize(count); }
  circular_deque(size_type count, const T& value) { resize(count, value); }

  // Range constructor.
  template <class InputIterator>
  circular_deque(InputIterator first, InputIterator last) {
    assign(first, last);
  }

  // Copy/move.
  // Copy constructor sizes the buffer exactly (size + 1 internal slot) before
  // copying, so no reallocation happens during the assign.
  circular_deque(const circular_deque& other) : buffer_(other.size() + 1) {
    assign(other.begin(), other.end());
  }
  circular_deque(circular_deque&& other) noexcept
      : buffer_(std::move(other.buffer_)),
        begin_(other.begin_),
        end_(other.end_) {
    // Leave |other| in the canonical empty state.
    other.begin_ = 0;
    other.end_ = 0;
  }

  circular_deque(std::initializer_list<value_type> init) { assign(init); }

  ~circular_deque() { DestructRange(begin_, end_); }

  // ---------------------------------------------------------------------------
  // Assignments.
  //
  // All of these may invalidate iterators and references.

  circular_deque& operator=(const circular_deque& other) {
    if (&other == this)
      return *this;

    reserve(other.size());
    assign(other.begin(), other.end());
    return *this;
  }
  circular_deque& operator=(circular_deque&& other) noexcept {
    if (&other == this)
      return *this;

    // We're about to overwrite the buffer, so don't free it in clear to
    // avoid doing it twice.
    ClearRetainCapacity();
    buffer_ = std::move(other.buffer_);
    begin_ = other.begin_;
    end_ = other.end_;

    other.begin_ = 0;
    other.end_ = 0;

    IncrementGeneration();
    return *this;
  }
  circular_deque& operator=(std::initializer_list<value_type> ilist) {
    reserve(ilist.size());
    assign(std::begin(ilist), std::end(ilist));
    return *this;
  }

  void assign(size_type count, const value_type& value) {
    ClearRetainCapacity();
    reserve(count);
    for (size_t i = 0; i < count; i++)
      emplace_back(value);
    IncrementGeneration();
  }

  // This variant should be enabled only when InputIterator is an iterator.
  template <typename InputIterator>
  std::enable_if_t<::base::internal::is_iterator<InputIterator>::value, void>
  assign(InputIterator first, InputIterator last) {
    // Possible future enhancement, dispatch on iterator tag type. For forward
    // iterators we can use std::distance to preallocate the space required
    // and only do one copy.
    ClearRetainCapacity();
    for (; first != last; ++first)
      emplace_back(*first);
    IncrementGeneration();
  }

  void assign(std::initializer_list<value_type> value) {
    reserve(std::distance(value.begin(), value.end()));
    assign(value.begin(), value.end());
  }

  // ---------------------------------------------------------------------------
  // Accessors.
  //
  // Since this class assumes no exceptions, at() and operator[] are equivalent.

  const value_type& at(size_type i) const {
    DCHECK(i < size());
    // |right_size| is the number of slots from begin_ to the physical end of
    // the buffer. If the contents wrap, logical index |i| may land in either
    // segment.
    size_t right_size = buffer_.capacity() - begin_;
    if (begin_ <= end_ || i < right_size)
      return buffer_[begin_ + i];
    return buffer_[i - right_size];
  }
  value_type& at(size_type i) {
    return const_cast<value_type&>(std::as_const(*this).at(i));
  }

  value_type& operator[](size_type i) {
    return const_cast<value_type&>(std::as_const(*this)[i]);
  }

  const value_type& operator[](size_type i) const { return at(i); }

  value_type& front() {
    DCHECK(!empty());
    return buffer_[begin_];
  }
  const value_type& front() const {
    DCHECK(!empty());
    return buffer_[begin_];
  }

  value_type& back() {
    DCHECK(!empty());
    return *(--end());
  }
  const value_type& back() const {
    DCHECK(!empty());
    return *(--end());
  }

  // ---------------------------------------------------------------------------
  // Iterators.

  iterator begin() { return iterator(this, begin_); }
  const_iterator begin() const { return const_iterator(this, begin_); }
  const_iterator cbegin() const { return const_iterator(this, begin_); }

  iterator end() { return iterator(this, end_); }
  const_iterator end() const { return const_iterator(this, end_); }
  const_iterator cend() const { return const_iterator(this, end_); }

  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  const_reverse_iterator crbegin() const { return rbegin(); }

  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }
  const_reverse_iterator crend() const { return rend(); }

  // ---------------------------------------------------------------------------
  // Memory management.

  // IMPORTANT NOTE ON reserve(...): This class implements auto-shrinking of
  // the buffer when elements are deleted and there is "too much" wasted space.
  // So if you call reserve() with a large size in anticipation of pushing many
  // elements, but pop an element before the queue is full, the capacity you
  // reserved may be lost.
  //
  // As a result, it's only worthwhile to call reserve() when you're adding
  // many things at once with no intermediate operations.
  void reserve(size_type new_capacity) {
    if (new_capacity > capacity())
      SetCapacityTo(new_capacity);
  }

  size_type capacity() const {
    // One item is wasted to indicate end().
    return buffer_.capacity() == 0 ? 0 : buffer_.capacity() - 1;
  }

  void shrink_to_fit() {
    if (empty()) {
      // Optimize empty case to really delete everything if there was
      // something.
      if (buffer_.capacity())
        buffer_ = VectorBuffer();
    } else {
      SetCapacityTo(size());
    }
  }

  // ---------------------------------------------------------------------------
  // Size management.

  // This will additionally reset the capacity() to 0.
  void clear() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    ClearRetainCapacity();
    buffer_ = VectorBuffer();
  }

  bool empty() const { return begin_ == end_; }

  size_type size() const {
    if (begin_ <= end_)
      return end_ - begin_;
    return buffer_.capacity() - begin_ + end_;
  }

  // When reducing size, the elements are deleted from the end. When expanding
  // size, elements are added to the end with |value| or the default
  // constructed version. Even when using resize(count) to shrink, a default
  // constructor is required for the code to compile, even though it will not
  // be called.
  //
  // There are two versions rather than using a default value to avoid
  // creating a temporary when shrinking (when it's not needed). Plus if
  // the default constructor is desired when expanding usually just calling it
  // for each element is faster than making a default-constructed temporary and
  // copying it.
  void resize(size_type count) {
    // SEE BELOW VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      // This could be slightly more efficient but expanding a queue with
      // identical elements is unusual and the extra computations of emplacing
      // one-by-one will typically be small relative to calling the constructor
      // for every item.
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back();
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }
  void resize(size_type count, const value_type& value) {
    // SEE ABOVE VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back(value);
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // Insert and erase.
  //
  // Insertion and deletion in the middle is O(n) and invalidates all existing
  // iterators.
  //
  // The implementation of insert isn't optimized as much as it could be. If
  // the insertion requires that the buffer be grown, it will first be grown
  // and everything moved, and then the items will be inserted, potentially
  // moving some items twice. This simplifies the implementation substantially
  // and means less generated templatized code. Since this is an uncommon
  // operation for deques, and already relatively slow, it doesn't seem worth
  // the benefit to optimize this.

  void insert(const_iterator pos, size_type count, const T& value) {
    ValidateIterator(pos);

    // Optimize insert at the beginning.
    if (pos == begin()) {
      ExpandCapacityIfNecessary(count);
      // push_front handles the generation increment; order doesn't matter
      // since every inserted element is a copy of |value|.
      for (size_t i = 0; i < count; i++)
        push_front(value);
      return;
    }

    iterator insert_cur(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(count, &insert_cur, &insert_end);
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(value);
      ++insert_cur;
    }

    IncrementGeneration();
  }

  // This enable_if keeps this call from getting confused with the (pos, count,
  // value) version when value is an integer.
  template <class InputIterator>
  std::enable_if_t<::base::internal::is_iterator<InputIterator>::value, void>
  insert(const_iterator pos, InputIterator first, InputIterator last) {
    ValidateIterator(pos);

    const difference_type inserted_items_signed = std::distance(first, last);
    if (inserted_items_signed == 0)
      return;  // Can divide by 0 when doing modulo below, so return early.
    CHECK(inserted_items_signed > 0);
    const size_type inserted_items =
        static_cast<size_type>(inserted_items_signed);

    // Make a hole to copy the items into.
    iterator insert_cur;
    iterator insert_end;
    if (pos == begin()) {
      // Optimize insert at the beginning, nothing needs to be shifted and the
      // hole is the |inserted_items| block immediately before |begin_|.
      ExpandCapacityIfNecessary(inserted_items);
      insert_end = begin();
      begin_ =
          (begin_ + buffer_.capacity() - inserted_items) % buffer_.capacity();
      insert_cur = begin();
    } else {
      insert_cur = iterator(this, pos.index_);
      MakeRoomFor(inserted_items, &insert_cur, &insert_end);
    }

    // Copy the items.
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(*first);
      ++insert_cur;
      ++first;
    }

    IncrementGeneration();
  }

  // These all return an iterator to the inserted item. Existing iterators will
  // be invalidated.
  iterator insert(const_iterator pos, const T& value) {
    return emplace(pos, value);
  }
  iterator insert(const_iterator pos, T&& value) {
    return emplace(pos, std::move(value));
  }
  template <class... Args>
  iterator emplace(const_iterator pos, Args&&... args) {
    ValidateIterator(pos);

    // Optimize insert at beginning which doesn't require shifting.
    if (pos == cbegin()) {
      emplace_front(std::forward<Args>(args)...);
      return begin();
    }

    // Do this before we make the new iterators we return.
    IncrementGeneration();

    iterator insert_begin(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(1, &insert_begin, &insert_end);
    new (&buffer_[insert_begin.index_]) T(std::forward<Args>(args)...);

    return insert_begin;
  }

  // Calling erase() won't automatically resize the buffer smaller like resize
  // or the pop functions. Erase is slow and relatively uncommon, and for
  // normal deque usage a pop will normally be done on a regular basis that
  // will prevent excessive buffer usage over long periods of time. It's not
  // worth having the extra code for every template instantiation of erase()
  // to resize capacity downward to a new buffer.
  iterator erase(const_iterator pos) { return erase(pos, pos + 1); }
  iterator erase(const_iterator first, const_iterator last) {
    ValidateIterator(first);
    ValidateIterator(last);

    IncrementGeneration();

    // First, call the destructor on the deleted items.
    if (first.index_ == last.index_) {
      // Nothing deleted. Need to return early to avoid falling through to
      // moving items on top of themselves.
      return iterator(this, first.index_);
    } else if (first.index_ < last.index_) {
      // Contiguous range.
      buffer_.DestructRange(&buffer_[first.index_], &buffer_[last.index_]);
    } else {
      // Deleted range wraps around.
      buffer_.DestructRange(&buffer_[first.index_],
                            &buffer_[buffer_.capacity()]);
      buffer_.DestructRange(&buffer_[0], &buffer_[last.index_]);
    }

    if (first.index_ == begin_) {
      // This deletion is from the beginning. Nothing needs to be copied, only
      // begin_ needs to be updated.
      begin_ = last.index_;
      return iterator(this, last.index_);
    }

    // In an erase operation, the shifted items all move logically to the left,
    // so move them from left-to-right.
    iterator move_src(this, last.index_);
    iterator move_src_end = end();
    iterator move_dest(this, first.index_);
    for (; move_src < move_src_end; move_src++, move_dest++) {
      buffer_.MoveRange(&buffer_[move_src.index_],
                        &buffer_[move_src.index_ + 1],
                        &buffer_[move_dest.index_]);
    }

    end_ = move_dest.index_;

    // Since we did not reallocate and only changed things after the erase
    // element(s), the input iterator's index points to the thing following the
    // deletion.
    return iterator(this, first.index_);
  }

  // ---------------------------------------------------------------------------
  // Begin/end operations.

  void push_front(const T& value) { emplace_front(value); }
  void push_front(T&& value) { emplace_front(std::move(value)); }

  void push_back(const T& value) { emplace_back(value); }
  void push_back(T&& value) { emplace_back(std::move(value)); }

  template <class... Args>
  reference emplace_front(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    // begin_ steps backwards, wrapping from slot 0 to the last buffer slot.
    if (begin_ == 0)
      begin_ = buffer_.capacity() - 1;
    else
      begin_--;
    IncrementGeneration();
    new (&buffer_[begin_]) T(std::forward<Args>(args)...);
    return front();
  }

  template <class... Args>
  reference emplace_back(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    new (&buffer_[end_]) T(std::forward<Args>(args)...);
    // end_ steps forward, wrapping from the last buffer slot to 0.
    if (end_ == buffer_.capacity() - 1)
      end_ = 0;
    else
      end_++;
    IncrementGeneration();
    return back();
  }

  void pop_front() {
    DCHECK(size());
    buffer_.DestructRange(&buffer_[begin_], &buffer_[begin_ + 1]);
    begin_++;
    if (begin_ == buffer_.capacity())
      begin_ = 0;

    ShrinkCapacityIfNecessary();

    // Technically popping will not invalidate any iterators since the
    // underlying buffer will be stable. But in the future we may want to add a
    // feature that resizes the buffer smaller if there is too much wasted
    // space. This ensures we can make such a change safely.
    IncrementGeneration();
  }
  void pop_back() {
    DCHECK(size());
    if (end_ == 0)
      end_ = buffer_.capacity() - 1;
    else
      end_--;
    buffer_.DestructRange(&buffer_[end_], &buffer_[end_ + 1]);

    ShrinkCapacityIfNecessary();

    // See pop_front comment about why this is here.
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // General operations.

  void swap(circular_deque& other) {
    std::swap(buffer_, other.buffer_);
    std::swap(begin_, other.begin_);
    std::swap(end_, other.end_);
    IncrementGeneration();
  }

  friend void swap(circular_deque& lhs, circular_deque& rhs) { lhs.swap(rhs); }

 private:
  friend internal::circular_deque_iterator<T>;
  friend internal::circular_deque_const_iterator<T>;

  // Moves the items in the given circular buffer to the current one. The
  // source is moved from so will become invalid. The destination buffer must
  // have already been allocated with enough size.
  static void MoveBuffer(VectorBuffer& from_buf,
                         size_t from_begin,
                         size_t from_end,
                         VectorBuffer* to_buf,
                         size_t* to_begin,
                         size_t* to_end) {
    size_t from_capacity = from_buf.capacity();

    // The destination always starts un-wrapped at index 0.
    *to_begin = 0;
    if (from_begin < from_end) {
      // Contiguous.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_end],
                         to_buf->begin());
      *to_end = from_end - from_begin;
    } else if (from_begin > from_end) {
      // Discontiguous, copy the right side to the beginning of the new buffer.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_capacity],
                         to_buf->begin());
      size_t right_size = from_capacity - from_begin;
      // Append the left side.
      from_buf.MoveRange(&from_buf[0], &from_buf[from_end],
                         &(*to_buf)[right_size]);
      *to_end = right_size + from_end;
    } else {
      // No items.
      *to_end = 0;
    }
  }

  // Expands the buffer size. This assumes the size is larger than the
  // number of elements in the vector (it won't call delete on anything).
  void SetCapacityTo(size_t new_capacity) {
    // Use the capacity + 1 as the internal buffer size to differentiate
    // empty and full (see definition of buffer_ below).
    VectorBuffer new_buffer(new_capacity + 1);
    MoveBuffer(buffer_, begin_, end_, &new_buffer, &begin_, &end_);
    buffer_ = std::move(new_buffer);
  }
  void ExpandCapacityIfNecessary(size_t additional_elts) {
    size_t min_new_capacity = size() + additional_elts;
    if (capacity() >= min_new_capacity)
      return;  // Already enough room.

    min_new_capacity =
        std::max(min_new_capacity, internal::kCircularBufferInitialCapacity);

    // std::vector always grows by at least 50%. WTF::Deque grows by at least
    // 25%. We expect queue workloads to generally stay at a similar size and
    // grow less than a vector might, so use 25%.
    size_t new_capacity =
        std::max(min_new_capacity, capacity() + capacity() / 4);
    SetCapacityTo(new_capacity);
  }

  void ShrinkCapacityIfNecessary() {
    // Don't auto-shrink below this size.
    if (capacity() <= internal::kCircularBufferInitialCapacity)
      return;

    // Shrink when 100% of the size() is wasted.
    size_t sz = size();
    size_t empty_spaces = capacity() - sz;
    if (empty_spaces < sz)
      return;

    // Leave 1/4 the size as free capacity, not going below the initial
    // capacity.
    size_t new_capacity =
        std::max(internal::kCircularBufferInitialCapacity, sz + sz / 4);
    if (new_capacity < capacity()) {
      // Count extra item to convert to internal capacity.
      SetCapacityTo(new_capacity);
    }
  }

  // Backend for clear() but does not resize the internal buffer.
  void ClearRetainCapacity() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    DestructRange(begin_, end_);
    begin_ = 0;
    end_ = 0;
    IncrementGeneration();
  }

  // Calls destructors for the given begin->end indices. The indices may wrap
  // around. The buffer is not resized, and the begin_ and end_ members are
  // not changed.
  void DestructRange(size_t begin, size_t end) {
    if (end == begin) {
      return;
    } else if (end > begin) {
      buffer_.DestructRange(&buffer_[begin], &buffer_[end]);
    } else {
      // Wrapped range: destroy the tail segment, then the head segment.
      buffer_.DestructRange(&buffer_[begin], &buffer_[buffer_.capacity()]);
      buffer_.DestructRange(&buffer_[0], &buffer_[end]);
    }
  }

  // Makes room for |count| items starting at |*insert_begin|. Since iterators
  // are not stable across buffer resizes, |*insert_begin| will be updated to
  // point to the beginning of the newly opened position in the new array (it's
  // in/out), and the end of the newly opened position (it's out-only).
  // Opens a gap of |count| uninitialized slots starting at |*insert_begin|,
  // shifting later elements right. May reallocate; both iterators are
  // rewritten to be valid against the (possibly new) buffer.
  void MakeRoomFor(size_t count, iterator* insert_begin, iterator* insert_end) {
    if (count == 0) {
      // Nothing to open; the gap is empty.
      *insert_end = *insert_begin;
      return;
    }

    // The offset from the beginning will be stable across reallocations.
    size_t begin_offset = insert_begin->OffsetFromBegin();
    ExpandCapacityIfNecessary(count);

    // Recompute the buffer indices of the gap, modulo the (possibly new)
    // internal capacity.
    insert_begin->index_ = (begin_ + begin_offset) % buffer_.capacity();
    *insert_end =
        iterator(this, (insert_begin->index_ + count) % buffer_.capacity());

    // Update the new end and prepare the iterators for copying.
    iterator src = end();
    end_ = (end_ + count) % buffer_.capacity();
    iterator dest = end();

    // Move the elements. This will always involve shifting logically to the
    // right, so move in a right-to-left order.
    while (true) {
      if (src == *insert_begin)
        break;
      --src;
      --dest;
      // Move a single element; MoveRange takes a [first, last) pair.
      buffer_.MoveRange(&buffer_[src.index_], &buffer_[src.index_ + 1],
                        &buffer_[dest.index_]);
    }
  }

#if DCHECK_IS_ON()
  // Asserts the given index is dereferencable. The index is an index into the
  // buffer, not an index used by operator[] or at() which will be offsets from
  // begin.
  void CheckValidIndex(size_t i) const {
    if (begin_ <= end_)
      // Contiguous case: valid indices are [begin_, end_).
      DCHECK(i >= begin_ && i < end_);
    else
      // Wrapped case: valid indices are [begin_, capacity) or [0, end_).
      DCHECK((i >= begin_ && i < buffer_.capacity()) || i < end_);
  }

  // Asserts the given index is either dereferencable or points to end().
  void CheckValidIndexOrEnd(size_t i) const {
    if (i != end_)
      CheckValidIndex(i);
  }

  // Asserts that |i| belongs to this deque and has not outlived a mutation
  // (see generation_ below).
  void ValidateIterator(const const_iterator& i) const {
    DCHECK(i.parent_deque_ == this);
    i.CheckUnstableUsage();
  }

  // See generation_ below.
  void IncrementGeneration() { generation_++; }
#else
  // No-op versions of these functions for release builds.
1082 void CheckValidIndex(size_t) const {} 1083 void CheckValidIndexOrEnd(size_t) const {} 1084 void ValidateIterator(const const_iterator& i) const {} 1085 void IncrementGeneration() {} 1086 #endif 1087 1088 // Danger, the buffer_.capacity() is the "internal capacity" which is 1089 // capacity() + 1 since there is an extra item to indicate the end. Otherwise 1090 // being completely empty and completely full are indistinguishable (begin == 1091 // end). We could add a separate flag to avoid it, but that adds significant 1092 // extra complexity since every computation will have to check for it. Always 1093 // keeping one extra unused element in the buffer makes iterator computations 1094 // much simpler. 1095 // 1096 // Container internal code will want to use buffer_.capacity() for offset 1097 // computations rather than capacity(). 1098 VectorBuffer buffer_; 1099 size_type begin_ = 0; 1100 size_type end_ = 0; 1101 1102 #if DCHECK_IS_ON() 1103 // Incremented every time a modification is made that could affect iterator 1104 // invalidations. 1105 uint64_t generation_ = 0; 1106 #endif 1107 }; 1108 1109 // Implementations of base::Erase[If] (see base/stl_util.h). 1110 template <class T, class Value> 1111 size_t Erase(circular_deque<T>& container, const Value& value) { 1112 auto it = ranges::remove(container, value); 1113 size_t removed = std::distance(it, container.end()); 1114 container.erase(it, container.end()); 1115 return removed; 1116 } 1117 1118 template <class T, class Predicate> 1119 size_t EraseIf(circular_deque<T>& container, Predicate pred) { 1120 auto it = ranges::remove_if(container, pred); 1121 size_t removed = std::distance(it, container.end()); 1122 container.erase(it, container.end()); 1123 return removed; 1124 } 1125 1126 } // namespace base 1127 1128 #endif // BASE_CONTAINERS_CIRCULAR_DEQUE_H_