tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

btree.h (119548B)


      1 // Copyright 2018 The Abseil Authors.
      2 //
      3 // Licensed under the Apache License, Version 2.0 (the "License");
      4 // you may not use this file except in compliance with the License.
      5 // You may obtain a copy of the License at
      6 //
      7 //      https://www.apache.org/licenses/LICENSE-2.0
      8 //
      9 // Unless required by applicable law or agreed to in writing, software
     10 // distributed under the License is distributed on an "AS IS" BASIS,
     11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     12 // See the License for the specific language governing permissions and
     13 // limitations under the License.
     14 
     15 // A btree implementation of the STL set and map interfaces. A btree is smaller
     16 // and generally also faster than STL set/map (refer to the benchmarks below).
     17 // The red-black tree implementation of STL set/map has an overhead of 3
     18 // pointers (left, right and parent) plus the node color information for each
     19 // stored value. So a set<int32_t> consumes 40 bytes for each value stored in
     20 // 64-bit mode. This btree implementation stores multiple values on fixed
     21 // size nodes (usually 256 bytes) and doesn't store child pointers for leaf
     22 // nodes. The result is that a btree_set<int32_t> may use much less memory per
     23 // stored value. For the random insertion benchmark in btree_bench.cc, a
     24 // btree_set<int32_t> with node-size of 256 uses 5.1 bytes per stored value.
     25 //
     26 // The packing of multiple values on to each node of a btree has another effect
     27 // besides better space utilization: better cache locality due to fewer cache
     28 // lines being accessed. Better cache locality translates into faster
     29 // operations.
     30 //
     31 // CAVEATS
     32 //
     33 // Insertions and deletions on a btree can cause splitting, merging or
     34 // rebalancing of btree nodes. And even without these operations, insertions
     35 // and deletions on a btree will move values around within a node. In both
     36 // cases, the result is that insertions and deletions can invalidate iterators
     37 // pointing to values other than the one being inserted/deleted. Therefore, this
     38 // container does not provide pointer stability. This is notably different from
     39 // STL set/map which takes care to not invalidate iterators on insert/erase
     40 // except, of course, for iterators pointing to the value being erased.  A
     41 // partial workaround when erasing is available: erase() returns an iterator
     42 // pointing to the item just after the one that was erased (or end() if none
     43 // exists).
     44 
     45 #ifndef ABSL_CONTAINER_INTERNAL_BTREE_H_
     46 #define ABSL_CONTAINER_INTERNAL_BTREE_H_
     47 
     48 #include <algorithm>
     49 #include <cassert>
     50 #include <cstddef>
     51 #include <cstdint>
     52 #include <cstring>
     53 #include <functional>
     54 #include <iterator>
     55 #include <limits>
     56 #include <string>
     57 #include <type_traits>
     58 #include <utility>
     59 
     60 #include "absl/base/config.h"
     61 #include "absl/base/internal/raw_logging.h"
     62 #include "absl/base/macros.h"
     63 #include "absl/base/optimization.h"
     64 #include "absl/container/internal/common.h"
     65 #include "absl/container/internal/common_policy_traits.h"
     66 #include "absl/container/internal/compressed_tuple.h"
     67 #include "absl/container/internal/container_memory.h"
     68 #include "absl/container/internal/layout.h"
     69 #include "absl/memory/memory.h"
     70 #include "absl/meta/type_traits.h"
     71 #include "absl/strings/cord.h"
     72 #include "absl/strings/string_view.h"
     73 #include "absl/types/compare.h"
     74 
     75 namespace absl {
     76 ABSL_NAMESPACE_BEGIN
     77 namespace container_internal {
     78 
// Decide at compile time whether to enable generation-based iterator-validity
// checking. The macro may not be set manually; it is derived from the
// sanitizer configuration below.
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
#error ABSL_BTREE_ENABLE_GENERATIONS cannot be directly set
#elif (defined(ABSL_HAVE_ADDRESS_SANITIZER) ||   \
       defined(ABSL_HAVE_HWADDRESS_SANITIZER) || \
       defined(ABSL_HAVE_MEMORY_SANITIZER)) &&   \
    !defined(NDEBUG_SANITIZER)  // If defined, performance is important.
// When compiled in sanitizer mode, we add generation integers to the nodes and
// iterators. When iterators are used, we validate that the container has not
// been mutated since the iterator was constructed.
#define ABSL_BTREE_ENABLE_GENERATIONS
#endif

// Constexpr mirror of the macro above so the rest of the implementation can
// branch on it in ordinary C++ (e.g. in layout computations) instead of with
// the preprocessor.
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
constexpr bool BtreeGenerationsEnabled() { return true; }
#else
constexpr bool BtreeGenerationsEnabled() { return false; }
#endif
     96 
// The type that results from invoking a const `Compare` with arguments of
// types `const T &` and `const U &`.
template <typename Compare, typename T, typename U>
using compare_result_t = absl::result_of_t<const Compare(const T &, const U &)>;
     99 
// A helper class that indicates if the Compare parameter is a key-compare-to
// comparator, i.e. one whose result converts to `absl::weak_ordering`
// (three-way comparison) rather than being a plain boolean less-than.
template <typename Compare, typename T>
using btree_is_key_compare_to =
    std::is_convertible<compare_result_t<Compare, T, T>, absl::weak_ordering>;
    105 
    106 struct StringBtreeDefaultLess {
    107  using is_transparent = void;
    108 
    109  StringBtreeDefaultLess() = default;
    110 
    111  // Compatibility constructor.
    112  StringBtreeDefaultLess(std::less<std::string>) {}        // NOLINT
    113  StringBtreeDefaultLess(std::less<absl::string_view>) {}  // NOLINT
    114 
    115  // Allow converting to std::less for use in key_comp()/value_comp().
    116  explicit operator std::less<std::string>() const { return {}; }
    117  explicit operator std::less<absl::string_view>() const { return {}; }
    118  explicit operator std::less<absl::Cord>() const { return {}; }
    119 
    120  absl::weak_ordering operator()(absl::string_view lhs,
    121                                 absl::string_view rhs) const {
    122    return compare_internal::compare_result_as_ordering(lhs.compare(rhs));
    123  }
    124  StringBtreeDefaultLess(std::less<absl::Cord>) {}  // NOLINT
    125  absl::weak_ordering operator()(const absl::Cord &lhs,
    126                                 const absl::Cord &rhs) const {
    127    return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
    128  }
    129  absl::weak_ordering operator()(const absl::Cord &lhs,
    130                                 absl::string_view rhs) const {
    131    return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
    132  }
    133  absl::weak_ordering operator()(absl::string_view lhs,
    134                                 const absl::Cord &rhs) const {
    135    return compare_internal::compare_result_as_ordering(-rhs.Compare(lhs));
    136  }
    137 };
    138 
    139 struct StringBtreeDefaultGreater {
    140  using is_transparent = void;
    141 
    142  StringBtreeDefaultGreater() = default;
    143 
    144  StringBtreeDefaultGreater(std::greater<std::string>) {}        // NOLINT
    145  StringBtreeDefaultGreater(std::greater<absl::string_view>) {}  // NOLINT
    146 
    147  // Allow converting to std::greater for use in key_comp()/value_comp().
    148  explicit operator std::greater<std::string>() const { return {}; }
    149  explicit operator std::greater<absl::string_view>() const { return {}; }
    150  explicit operator std::greater<absl::Cord>() const { return {}; }
    151 
    152  absl::weak_ordering operator()(absl::string_view lhs,
    153                                 absl::string_view rhs) const {
    154    return compare_internal::compare_result_as_ordering(rhs.compare(lhs));
    155  }
    156  StringBtreeDefaultGreater(std::greater<absl::Cord>) {}  // NOLINT
    157  absl::weak_ordering operator()(const absl::Cord &lhs,
    158                                 const absl::Cord &rhs) const {
    159    return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
    160  }
    161  absl::weak_ordering operator()(const absl::Cord &lhs,
    162                                 absl::string_view rhs) const {
    163    return compare_internal::compare_result_as_ordering(-lhs.Compare(rhs));
    164  }
    165  absl::weak_ordering operator()(absl::string_view lhs,
    166                                 const absl::Cord &rhs) const {
    167    return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
    168  }
    169 };
    170 
// See below comments for checked_compare.
//
// Storage base for checked_compare. The primary template inherits from
// class-type comparators so empty comparators get the empty-base
// optimization; the specialization stores non-class comparators (e.g.
// function pointers), which cannot be inherited from, as a member.
template <typename Compare, bool is_class = std::is_class<Compare>::value>
struct checked_compare_base : Compare {
  using Compare::Compare;
  explicit checked_compare_base(Compare c) : Compare(std::move(c)) {}
  // Returns the wrapped comparator.
  const Compare &comp() const { return *this; }
};
template <typename Compare>
struct checked_compare_base<Compare, false> {
  explicit checked_compare_base(Compare c) : compare(std::move(c)) {}
  const Compare &comp() const { return compare; }
  Compare compare;
};
    184 
// A mechanism for opting out of checked_compare for use only in btree_test.cc.
// Comparators deriving from this tag are used unwrapped — see the `type`
// alias in key_compare_adapter below.
struct BtreeTestOnlyCheckedCompareOptOutBase {};
    187 
// A helper class to adapt the specified comparator for two use cases:
// (1) When using common Abseil string types with common comparison functors,
// convert a boolean comparison into a three-way comparison that returns an
// `absl::weak_ordering`. This helper class is specialized for
// less<std::string>, greater<std::string>, less<string_view>,
// greater<string_view>, less<absl::Cord>, and greater<absl::Cord>.
// (2) Adapt the comparator to diagnose cases of non-strict-weak-ordering (see
// https://en.cppreference.com/w/cpp/named_req/Compare) in debug mode. Whenever
// a comparison is made, we will make assertions to verify that the comparator
// is valid.
template <typename Compare, typename Key>
struct key_compare_adapter {
  // Inherit from checked_compare_base to support function pointers and also
  // keep empty-base-optimization (EBO) support for classes.
  // Note: we can't use CompressedTuple here because that would interfere
  // with the EBO for `btree::rightmost_`. `btree::rightmost_` is itself a
  // CompressedTuple and nested `CompressedTuple`s don't support EBO.
  // TODO(b/214288561): use CompressedTuple instead once it supports EBO for
  // nested `CompressedTuple`s.
  struct checked_compare : checked_compare_base<Compare> {
   private:
    using Base = typename checked_compare::checked_compare_base;
    using Base::comp;

    // If possible, returns whether `t` is equivalent to itself. We can only do
    // this for `Key`s because we can't be sure that it's safe to call
    // `comp()(k, k)` otherwise. Even if SFINAE allows it, there could be a
    // compilation failure inside the implementation of the comparison operator.
    bool is_self_equivalent(const Key &k) const {
      // Note: this works for both boolean and three-way comparators.
      return comp()(k, k) == 0;
    }
    // If we can't compare `t` with itself, returns true unconditionally.
    template <typename T>
    bool is_self_equivalent(const T &) const {
      return true;
    }

   public:
    using Base::Base;
    checked_compare(Compare comp) : Base(std::move(comp)) {}  // NOLINT

    // Allow converting to Compare for use in key_comp()/value_comp().
    explicit operator Compare() const { return comp(); }

    // Overload selected when `Compare` is a boolean (less-than) comparator.
    template <typename T, typename U,
              absl::enable_if_t<
                  std::is_same<bool, compare_result_t<Compare, T, U>>::value,
                  int> = 0>
    bool operator()(const T &lhs, const U &rhs) const {
      // NOTE: if any of these assertions fail, then the comparator does not
      // establish a strict-weak-ordering (see
      // https://en.cppreference.com/w/cpp/named_req/Compare).
      assert(is_self_equivalent(lhs));
      assert(is_self_equivalent(rhs));
      const bool lhs_comp_rhs = comp()(lhs, rhs);
      // Asymmetry: lhs < rhs and rhs < lhs must not both hold.
      assert(!lhs_comp_rhs || !comp()(rhs, lhs));
      return lhs_comp_rhs;
    }

    // Overload selected when `Compare` is a three-way (compare-to) comparator.
    template <
        typename T, typename U,
        absl::enable_if_t<std::is_convertible<compare_result_t<Compare, T, U>,
                                              absl::weak_ordering>::value,
                          int> = 0>
    absl::weak_ordering operator()(const T &lhs, const U &rhs) const {
      // NOTE: if any of these assertions fail, then the comparator does not
      // establish a strict-weak-ordering (see
      // https://en.cppreference.com/w/cpp/named_req/Compare).
      assert(is_self_equivalent(lhs));
      assert(is_self_equivalent(rhs));
      const absl::weak_ordering lhs_comp_rhs = comp()(lhs, rhs);
#ifndef NDEBUG
      // Verify that comparing in the opposite order yields the inverse result.
      const absl::weak_ordering rhs_comp_lhs = comp()(rhs, lhs);
      if (lhs_comp_rhs > 0) {
        assert(rhs_comp_lhs < 0 && "lhs_comp_rhs > 0 -> rhs_comp_lhs < 0");
      } else if (lhs_comp_rhs == 0) {
        assert(rhs_comp_lhs == 0 && "lhs_comp_rhs == 0 -> rhs_comp_lhs == 0");
      } else {
        assert(rhs_comp_lhs > 0 && "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0");
      }
#endif
      return lhs_comp_rhs;
    }
  };
  // Test-only opt-out comparators are used unwrapped; everything else gets
  // the checking wrapper above.
  using type = absl::conditional_t<
      std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase, Compare>::value,
      Compare, checked_compare>;
};
    277 
// Specializations that swap the common std comparators on Abseil string-like
// key types for the heterogeneous, three-way StringBtreeDefault{Less,Greater}
// comparators defined above.
template <>
struct key_compare_adapter<std::less<std::string>, std::string> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<std::string>, std::string> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultGreater;
};
    307 
// Detects an 'absl_btree_prefer_linear_node_search' member. This is
// a protocol used as an opt-in or opt-out of linear search.
//
//  For example, this would be useful for key types that wrap an integer
//  and define their own cheap operator<(). For example:
//
//   class K {
//    public:
//     using absl_btree_prefer_linear_node_search = std::true_type;
//     ...
//    private:
//     friend bool operator<(K a, K b) { return a.k_ < b.k_; }
//     int k_;
//   };
//
//   btree_map<K, V> m;  // Uses linear search
//
// If T has the preference tag, then it has a preference.
// Btree will use the tag's truth value.
//
// Primary templates: no nested tag means no preference (and, for the second
// trait, no linear-search preference).
template <typename T, typename = void>
struct has_linear_node_search_preference : std::false_type {};
template <typename T, typename = void>
struct prefers_linear_node_search : std::false_type {};
// Specializations selected (via void_t SFINAE) when T declares the nested
// `absl_btree_prefer_linear_node_search` tag type.
template <typename T>
struct has_linear_node_search_preference<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : std::true_type {};
template <typename T>
struct prefers_linear_node_search<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : T::absl_btree_prefer_linear_node_search {};
    339 
    340 template <typename Compare, typename Key>
    341 constexpr bool compare_has_valid_result_type() {
    342  using compare_result_type = compare_result_t<Compare, Key, Key>;
    343  return std::is_same<compare_result_type, bool>::value ||
    344         std::is_convertible<compare_result_type, absl::weak_ordering>::value;
    345 }
    346 
// The value_compare functor exposed by btree maps: orders two map
// value_types (key/mapped pairs) by applying the key comparator to their
// keys (`.first`).
template <typename original_key_compare, typename value_type>
class map_value_compare {
  template <typename Params>
  friend class btree;

  // Note: this `protected` is part of the API of std::map::value_compare. See
  // https://en.cppreference.com/w/cpp/container/map/value_compare.
 protected:
  explicit map_value_compare(original_key_compare c) : comp(std::move(c)) {}

  original_key_compare comp;  // NOLINT

 public:
  // Compares the keys of the two values with `comp`; the return type follows
  // the comparator's (bool or weak_ordering).
  auto operator()(const value_type &lhs, const value_type &rhs) const
      -> decltype(comp(lhs.first, rhs.first)) {
    return comp(lhs.first, rhs.first);
  }
};
    365 
// The parameter bundle shared by the btree implementation classes: derives
// the adapted comparator, value/slot typedefs, and node-sizing constants from
// the user-provided template arguments.
template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
          bool IsMulti, bool IsMap, typename SlotPolicy>
struct common_params : common_policy_traits<SlotPolicy> {
  using original_key_compare = Compare;

  // If Compare is a common comparator for a string-like type, then we adapt it
  // to use heterogeneous lookup and to be a key-compare-to comparator.
  // We also adapt the comparator to diagnose invalid comparators in debug mode.
  // We disable this when `Compare` is invalid in a way that will cause
  // adaptation to fail (having invalid return type) so that we can give a
  // better compilation failure in static_assert_validation. If we don't do
  // this, then there will be cascading compilation failures that are confusing
  // for users.
  using key_compare =
      absl::conditional_t<!compare_has_valid_result_type<Compare, Key>(),
                          Compare,
                          typename key_compare_adapter<Compare, Key>::type>;

  // Whether the comparator was swapped for one of the string-adapted
  // comparators above.
  static constexpr bool kIsKeyCompareStringAdapted =
      std::is_same<key_compare, StringBtreeDefaultLess>::value ||
      std::is_same<key_compare, StringBtreeDefaultGreater>::value;
  static constexpr bool kIsKeyCompareTransparent =
      IsTransparent<original_key_compare>::value || kIsKeyCompareStringAdapted;

  // A type which indicates if we have a key-compare-to functor or a plain old
  // key-compare functor.
  using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;

  using allocator_type = Alloc;
  using key_type = Key;
  using size_type = size_t;
  using difference_type = ptrdiff_t;

  using slot_policy = SlotPolicy;
  using slot_type = typename slot_policy::slot_type;
  using value_type = typename slot_policy::value_type;
  using init_type = typename slot_policy::mutable_value_type;
  using pointer = value_type *;
  using const_pointer = const value_type *;
  using reference = value_type &;
  using const_reference = const value_type &;

  // Maps compare values through map_value_compare (by key); sets compare
  // values with the original comparator directly.
  using value_compare =
      absl::conditional_t<IsMap,
                          map_value_compare<original_key_compare, value_type>,
                          original_key_compare>;
  using is_map_container = std::integral_constant<bool, IsMap>;

  // For the given lookup key type, returns whether we can have multiple
  // equivalent keys in the btree. If this is a multi-container, then we can.
  // Otherwise, we can have multiple equivalent keys only if all of the
  // following conditions are met:
  // - The comparator is transparent.
  // - The lookup key type is not the same as key_type.
  // - The comparator is not a StringBtreeDefault{Less,Greater} comparator
  //   that we know has the same equivalence classes for all lookup types.
  template <typename LookupKey>
  constexpr static bool can_have_multiple_equivalent_keys() {
    return IsMulti || (IsTransparent<key_compare>::value &&
                       !std::is_same<LookupKey, Key>::value &&
                       !kIsKeyCompareStringAdapted);
  }

  enum {
    kTargetNodeSize = TargetNodeSize,

    // Upper bound for the available space for slots. This is largest for leaf
    // nodes, which have overhead of at least a pointer + 4 bytes (for storing
    // 3 field_types and an enum).
    kNodeSlotSpace = TargetNodeSize - /*minimum overhead=*/(sizeof(void *) + 4),
  };

  // This is an integral type large enough to hold as many slots as will fit a
  // node of TargetNodeSize bytes.
  using node_count_type =
      absl::conditional_t<(kNodeSlotSpace / sizeof(slot_type) >
                           (std::numeric_limits<uint8_t>::max)()),
                          uint16_t, uint8_t>;  // NOLINT
};
    445 
    446 // An adapter class that converts a lower-bound compare into an upper-bound
    447 // compare. Note: there is no need to make a version of this adapter specialized
    448 // for key-compare-to functors because the upper-bound (the first value greater
    449 // than the input) is never an exact match.
    450 template <typename Compare>
    451 struct upper_bound_adapter {
    452  explicit upper_bound_adapter(const Compare &c) : comp(c) {}
    453  template <typename K1, typename K2>
    454  bool operator()(const K1 &a, const K2 &b) const {
    455    // Returns true when a is not greater than b.
    456    return !compare_internal::compare_result_as_less_than(comp(b, a));
    457  }
    458 
    459 private:
    460  Compare comp;
    461 };
    462 
// Whether a comparison made during a search was an exact match (kEq) or not
// (kNe); stored in SearchResult below.
enum class MatchKind : uint8_t { kEq, kNe };
    464 
// The result of a search. When IsCompareTo is true (key-compare-to
// comparators), the result carries both the found value and whether the match
// was exact.
template <typename V, bool IsCompareTo>
struct SearchResult {
  V value;
  MatchKind match;

  static constexpr bool HasMatch() { return true; }
  bool IsEq() const { return match == MatchKind::kEq; }
};
    473 
// When we don't use CompareTo, `match` is not present.
// This ensures that callers can't use it accidentally when it provides no
// useful information.
template <typename V>
struct SearchResult<V, false> {
  SearchResult() = default;
  explicit SearchResult(V v) : value(v) {}
  // Accepts (and discards) a MatchKind so both specializations can be
  // constructed uniformly.
  SearchResult(V v, MatchKind /*match*/) : value(v) {}

  V value;

  // No exact-match information exists in this specialization.
  static constexpr bool HasMatch() { return false; }
  static constexpr bool IsEq() { return false; }
};
    488 
    489 // A node in the btree holding. The same node type is used for both internal
    490 // and leaf nodes in the btree, though the nodes are allocated in such a way
    491 // that the children array is only valid in internal nodes.
    492 template <typename Params>
    493 class btree_node {
    494  using is_key_compare_to = typename Params::is_key_compare_to;
    495  using field_type = typename Params::node_count_type;
    496  using allocator_type = typename Params::allocator_type;
    497  using slot_type = typename Params::slot_type;
    498  using original_key_compare = typename Params::original_key_compare;
    499 
    500 public:
    501  using params_type = Params;
    502  using key_type = typename Params::key_type;
    503  using value_type = typename Params::value_type;
    504  using pointer = typename Params::pointer;
    505  using const_pointer = typename Params::const_pointer;
    506  using reference = typename Params::reference;
    507  using const_reference = typename Params::const_reference;
    508  using key_compare = typename Params::key_compare;
    509  using size_type = typename Params::size_type;
    510  using difference_type = typename Params::difference_type;
    511 
    512  // Btree decides whether to use linear node search as follows:
    513  //   - If the comparator expresses a preference, use that.
    514  //   - If the key expresses a preference, use that.
    515  //   - If the key is arithmetic and the comparator is std::less or
    516  //     std::greater, choose linear.
    517  //   - Otherwise, choose binary.
    518  // TODO(ezb): Might make sense to add condition(s) based on node-size.
    519  using use_linear_search = std::integral_constant<
    520      bool, has_linear_node_search_preference<original_key_compare>::value
    521                ? prefers_linear_node_search<original_key_compare>::value
    522            : has_linear_node_search_preference<key_type>::value
    523                ? prefers_linear_node_search<key_type>::value
    524                : std::is_arithmetic<key_type>::value &&
    525                      (std::is_same<std::less<key_type>,
    526                                    original_key_compare>::value ||
    527                       std::is_same<std::greater<key_type>,
    528                                    original_key_compare>::value)>;
    529 
    530  // This class is organized by absl::container_internal::Layout as if it had
    531  // the following structure:
    532  //   // A pointer to the node's parent.
    533  //   btree_node *parent;
    534  //
    535  //   // When ABSL_BTREE_ENABLE_GENERATIONS is defined, we also have a
    536  //   // generation integer in order to check that when iterators are
    537  //   // used, they haven't been invalidated already. Only the generation on
    538  //   // the root is used, but we have one on each node because whether a node
    539  //   // is root or not can change.
    540  //   uint32_t generation;
    541  //
    542  //   // The position of the node in the node's parent.
    543  //   field_type position;
    544  //   // The index of the first populated value in `values`.
    545  //   // TODO(ezb): right now, `start` is always 0. Update insertion/merge
    546  //   // logic to allow for floating storage within nodes.
    547  //   field_type start;
    548  //   // The index after the last populated value in `values`. Currently, this
    549  //   // is the same as the count of values.
    550  //   field_type finish;
    551  //   // The maximum number of values the node can hold. This is an integer in
    552  //   // [1, kNodeSlots] for root leaf nodes, kNodeSlots for non-root leaf
    553  //   // nodes, and kInternalNodeMaxCount (as a sentinel value) for internal
    554  //   // nodes (even though there are still kNodeSlots values in the node).
    555  //   // TODO(ezb): make max_count use only 4 bits and record log2(capacity)
    556  //   // to free extra bits for is_root, etc.
    557  //   field_type max_count;
    558  //
    559  //   // The array of values. The capacity is `max_count` for leaf nodes and
    560  //   // kNodeSlots for internal nodes. Only the values in
    561  //   // [start, finish) have been initialized and are valid.
    562  //   slot_type values[max_count];
    563  //
    564  //   // The array of child pointers. The keys in children[i] are all less
    565  //   // than key(i). The keys in children[i + 1] are all greater than key(i).
    566  //   // There are 0 children for leaf nodes and kNodeSlots + 1 children for
    567  //   // internal nodes.
    568  //   btree_node *children[kNodeSlots + 1];
    569  //
    570  // This class is only constructed by EmptyNodeType. Normally, pointers to the
    571  // layout above are allocated, cast to btree_node*, and de-allocated within
    572  // the btree implementation.
    573  ~btree_node() = default;
    574  btree_node(btree_node const &) = delete;
    575  btree_node &operator=(btree_node const &) = delete;
    576 
    577 protected:
    578  btree_node() = default;
    579 
    580 private:
    581  using layout_type =
    582      absl::container_internal::Layout<btree_node *, uint32_t, field_type,
    583                                       slot_type, btree_node *>;
    584  using leaf_layout_type = typename layout_type::template WithStaticSizes<
    585      /*parent*/ 1,
    586      /*generation*/ BtreeGenerationsEnabled() ? 1 : 0,
    587      /*position, start, finish, max_count*/ 4>;
    588  constexpr static size_type SizeWithNSlots(size_type n) {
    589    return leaf_layout_type(/*slots*/ n, /*children*/ 0).AllocSize();
    590  }
    591  // A lower bound for the overhead of fields other than slots in a leaf node.
    592  constexpr static size_type MinimumOverhead() {
    593    return SizeWithNSlots(1) - sizeof(slot_type);
    594  }
    595 
    596  // Compute how many values we can fit onto a leaf node taking into account
    597  // padding.
    598  constexpr static size_type NodeTargetSlots(const size_type begin,
    599                                             const size_type end) {
    600    return begin == end ? begin
    601           : SizeWithNSlots((begin + end) / 2 + 1) >
    602                   params_type::kTargetNodeSize
    603               ? NodeTargetSlots(begin, (begin + end) / 2)
    604               : NodeTargetSlots((begin + end) / 2 + 1, end);
    605  }
    606 
    607  constexpr static size_type kTargetNodeSize = params_type::kTargetNodeSize;
    608  constexpr static size_type kNodeTargetSlots =
    609      NodeTargetSlots(0, kTargetNodeSize);
    610 
    611  // We need a minimum of 3 slots per internal node in order to perform
    612  // splitting (1 value for the two nodes involved in the split and 1 value
    613  // propagated to the parent as the delimiter for the split). For performance
    614  // reasons, we don't allow 3 slots-per-node due to bad worst case occupancy of
    615  // 1/3 (for a node, not a b-tree).
    616  constexpr static size_type kMinNodeSlots = 4;
    617 
    618  constexpr static size_type kNodeSlots =
    619      kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots;
    620 
    621  using internal_layout_type = typename layout_type::template WithStaticSizes<
    622      /*parent*/ 1,
    623      /*generation*/ BtreeGenerationsEnabled() ? 1 : 0,
    624      /*position, start, finish, max_count*/ 4, /*slots*/ kNodeSlots,
    625      /*children*/ kNodeSlots + 1>;
    626 
    627  // The node is internal (i.e. is not a leaf node) if and only if `max_count`
    628  // has this value.
    629  constexpr static field_type kInternalNodeMaxCount = 0;
    630 
  // Leaves can have less than kNodeSlots values.
  constexpr static leaf_layout_type LeafLayout(
      const size_type slot_count = kNodeSlots) {
    return leaf_layout_type(slot_count, 0);
  }
  // Layout for an internal node (fixed size: always kNodeSlots slots).
  constexpr static auto InternalLayout() { return internal_layout_type(); }
  // Allocation size in bytes for a leaf node with `slot_count` slots.
  constexpr static size_type LeafSize(const size_type slot_count = kNodeSlots) {
    return LeafLayout(slot_count).AllocSize();
  }
  // Allocation size in bytes for an internal node.
  constexpr static size_type InternalSize() {
    return InternalLayout().AllocSize();
  }

  // Alignment required for any node allocation; leaf and internal layouts are
  // statically checked to agree so one value covers both.
  constexpr static size_type Alignment() {
    static_assert(LeafLayout(1).Alignment() == InternalLayout().Alignment(),
                  "Alignment of all nodes must be equal.");
    return InternalLayout().Alignment();
  }
    649 
  // N is the index of the type in the Layout definition.
  // ElementType<N> is the Nth type in the Layout definition.
  template <size_type N>
  inline typename layout_type::template ElementType<N> *GetField() {
    // We assert that we don't read from values that aren't there.
    assert(N < 4 || is_internal());
    return InternalLayout().template Pointer<N>(reinterpret_cast<char *>(this));
  }
  // Const overload of the above.
  template <size_type N>
  inline const typename layout_type::template ElementType<N> *GetField() const {
    assert(N < 4 || is_internal());
    return InternalLayout().template Pointer<N>(
        reinterpret_cast<const char *>(this));
  }
  // Field 0 is the parent pointer.
  void set_parent(btree_node *p) { *GetField<0>() = p; }
  // Field 2 packs the four control bytes: [0]=position, [1]=start, [2]=finish,
  // [3]=max_count. Field 3 holds the value slots.
  field_type &mutable_finish() { return GetField<2>()[2]; }
  slot_type *slot(size_type i) { return &GetField<3>()[i]; }
  slot_type *start_slot() { return slot(start()); }
  slot_type *finish_slot() { return slot(finish()); }
  const slot_type *slot(size_type i) const { return &GetField<3>()[i]; }
  void set_position(field_type v) { GetField<2>()[0] = v; }
  void set_start(field_type v) { GetField<2>()[1] = v; }
  void set_finish(field_type v) { GetField<2>()[2] = v; }
  // This method is only called by the node init methods.
  void set_max_count(field_type v) { GetField<2>()[3] = v; }
    675 
 public:
  // Whether this is a leaf node or not. This value doesn't change after the
  // node is created.
  bool is_leaf() const { return GetField<2>()[3] != kInternalNodeMaxCount; }
  // Whether this is an internal node or not. This value doesn't change after
  // the node is created.
  bool is_internal() const { return !is_leaf(); }

  // Getter for the position of this node in its parent.
  field_type position() const { return GetField<2>()[0]; }

  // Getter for the offset of the first value in the `values` array.
  field_type start() const {
    // TODO(ezb): when floating storage is implemented, return GetField<2>()[1];
    assert(GetField<2>()[1] == 0);
    return 0;
  }

  // Getter for the offset after the last value in the `values` array.
  field_type finish() const { return GetField<2>()[2]; }

  // Getters for the number of values stored in this node.
  field_type count() const {
    assert(finish() >= start());
    return finish() - start();
  }
  field_type max_count() const {
    // Internal nodes have max_count==kInternalNodeMaxCount.
    // Leaf nodes have max_count in [1, kNodeSlots].
    const field_type max_count = GetField<2>()[3];
    return max_count == field_type{kInternalNodeMaxCount}
               ? field_type{kNodeSlots}
               : max_count;
  }
    710 
  // Getter for the parent of this node.
  // TODO(ezb): assert that the child of the returned node at position
  // `node_->position()` maps to the current node.
  btree_node *parent() const { return *GetField<0>(); }
  // Getter for whether the node is the root of the tree. The parent of the
  // root of the tree is the leftmost node in the tree which is guaranteed to
  // be a leaf.
  bool is_root() const { return parent()->is_leaf(); }
  // Promotes this node to root: copies the old root's generation and adopts
  // its parent pointer (the leftmost-leaf sentinel).
  void make_root() {
    assert(parent()->is_root());
    set_generation(parent()->generation());
    set_parent(parent()->parent());
  }

  // Gets the root node's generation integer, which is the one used by the tree.
  uint32_t *get_root_generation() const {
    assert(BtreeGenerationsEnabled());
    const btree_node *curr = this;
    for (; !curr->is_root(); curr = curr->parent()) continue;
    return const_cast<uint32_t *>(&curr->GetField<1>()[0]);
  }

  // Returns the generation for iterator validation.
  uint32_t generation() const {
    return BtreeGenerationsEnabled() ? *get_root_generation() : 0;
  }
  // Updates generation. Should only be called on a root node or during node
  // initialization.
  void set_generation(uint32_t generation) {
    if (BtreeGenerationsEnabled()) GetField<1>()[0] = generation;
  }
  // Updates the generation. We do this whenever the node is mutated.
  void next_generation() {
    if (BtreeGenerationsEnabled()) ++*get_root_generation();
  }
    746 
  // Getters for the key/value at position i in the node.
  const key_type &key(size_type i) const { return params_type::key(slot(i)); }
  reference value(size_type i) { return params_type::element(slot(i)); }
  const_reference value(size_type i) const {
    return params_type::element(slot(i));
  }

  // Getters/setter for the child at position i in the node.
  btree_node *child(field_type i) const { return GetField<4>()[i]; }
  btree_node *start_child() const { return child(start()); }
  btree_node *&mutable_child(field_type i) { return GetField<4>()[i]; }
  // Poisons the child pointer for the sanitizer; the slot must not be read
  // again until re-set via one of the setters below.
  void clear_child(field_type i) {
    absl::container_internal::SanitizerPoisonObject(&mutable_child(i));
  }
  // Stores child `c` at index `i` without updating `c`'s position field.
  void set_child_noupdate_position(field_type i, btree_node *c) {
    absl::container_internal::SanitizerUnpoisonObject(&mutable_child(i));
    mutable_child(i) = c;
  }
  // Stores child `c` at index `i` and records that position in `c`.
  void set_child(field_type i, btree_node *c) {
    set_child_noupdate_position(i, c);
    c->set_position(i);
  }
  // Fully links child `c` at index `i`: position and parent pointer.
  void init_child(field_type i, btree_node *c) {
    set_child(i, c);
    c->set_parent(this);
  }
    773 
  // Returns the position of the first value whose key is not less than k.
  template <typename K>
  SearchResult<size_type, is_key_compare_to::value> lower_bound(
      const K &k, const key_compare &comp) const {
    return use_linear_search::value ? linear_search(k, comp)
                                    : binary_search(k, comp);
  }
  // Returns the position of the first value whose key is greater than k.
  // Implemented by running lower_bound with the comparison inverted via
  // upper_bound_adapter.
  template <typename K>
  size_type upper_bound(const K &k, const key_compare &comp) const {
    auto upper_compare = upper_bound_adapter<key_compare>(comp);
    return use_linear_search::value ? linear_search(k, upper_compare).value
                                    : binary_search(k, upper_compare).value;
  }

  // Linear search over this node's [start(), finish()) range; dispatches on
  // whether `Compare` is a three-way ("compare-to") comparator.
  template <typename K, typename Compare>
  SearchResult<size_type, btree_is_key_compare_to<Compare, key_type>::value>
  linear_search(const K &k, const Compare &comp) const {
    return linear_search_impl(k, start(), finish(), comp,
                              btree_is_key_compare_to<Compare, key_type>());
  }

  // Binary search over this node's [start(), finish()) range; dispatches on
  // whether `Compare` is a three-way ("compare-to") comparator.
  template <typename K, typename Compare>
  SearchResult<size_type, btree_is_key_compare_to<Compare, key_type>::value>
  binary_search(const K &k, const Compare &comp) const {
    return binary_search_impl(k, start(), finish(), comp,
                              btree_is_key_compare_to<Compare, key_type>());
  }
    802 
    803  // Returns the position of the first value whose key is not less than k using
    804  // linear search performed using plain compare.
    805  template <typename K, typename Compare>
    806  SearchResult<size_type, false> linear_search_impl(
    807      const K &k, size_type s, const size_type e, const Compare &comp,
    808      std::false_type /* IsCompareTo */) const {
    809    while (s < e) {
    810      if (!comp(key(s), k)) {
    811        break;
    812      }
    813      ++s;
    814    }
    815    return SearchResult<size_type, false>{s};
    816  }
    817 
    818  // Returns the position of the first value whose key is not less than k using
    819  // linear search performed using compare-to.
    820  template <typename K, typename Compare>
    821  SearchResult<size_type, true> linear_search_impl(
    822      const K &k, size_type s, const size_type e, const Compare &comp,
    823      std::true_type /* IsCompareTo */) const {
    824    while (s < e) {
    825      const absl::weak_ordering c = comp(key(s), k);
    826      if (c == 0) {
    827        return {s, MatchKind::kEq};
    828      } else if (c > 0) {
    829        break;
    830      }
    831      ++s;
    832    }
    833    return {s, MatchKind::kNe};
    834  }
    835 
    836  // Returns the position of the first value whose key is not less than k using
    837  // binary search performed using plain compare.
    838  template <typename K, typename Compare>
    839  SearchResult<size_type, false> binary_search_impl(
    840      const K &k, size_type s, size_type e, const Compare &comp,
    841      std::false_type /* IsCompareTo */) const {
    842    while (s != e) {
    843      const size_type mid = (s + e) >> 1;
    844      if (comp(key(mid), k)) {
    845        s = mid + 1;
    846      } else {
    847        e = mid;
    848      }
    849    }
    850    return SearchResult<size_type, false>{s};
    851  }
    852 
  // Returns the position of the first value whose key is not less than k using
  // binary search performed using compare-to.
  template <typename K, typename CompareTo>
  SearchResult<size_type, true> binary_search_impl(
      const K &k, size_type s, size_type e, const CompareTo &comp,
      std::true_type /* IsCompareTo */) const {
    if (params_type::template can_have_multiple_equivalent_keys<K>()) {
      MatchKind exact_match = MatchKind::kNe;
      while (s != e) {
        const size_type mid = (s + e) >> 1;
        const absl::weak_ordering c = comp(key(mid), k);
        if (c < 0) {
          s = mid + 1;
        } else {
          e = mid;
          if (c == 0) {
            // Need to return the first value whose key is not less than k,
            // which requires continuing the binary search if there could be
            // multiple equivalent keys.
            exact_match = MatchKind::kEq;
          }
        }
      }
      return {s, exact_match};
    } else {  // Can't have multiple equivalent keys.
      // With unique keys we can stop at the first equivalent key found, since
      // it is necessarily also the first one not less than k.
      while (s != e) {
        const size_type mid = (s + e) >> 1;
        const absl::weak_ordering c = comp(key(mid), k);
        if (c < 0) {
          s = mid + 1;
        } else if (c > 0) {
          e = mid;
        } else {
          return {mid, MatchKind::kEq};
        }
      }
      return {s, MatchKind::kNe};
    }
  }
    892 
  // Returns whether key i is ordered correctly with respect to the other keys
  // in the node. The motivation here is to detect comparators that violate
  // transitivity. Note: we only do comparisons of keys on this node rather than
  // the whole tree so that this is constant time.
  template <typename Compare>
  bool is_ordered_correctly(field_type i, const Compare &comp) const {
    // Skip the check for comparators that opted out and for the internal
    // string-adapted comparator.
    if (std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase,
                        Compare>::value ||
        params_type::kIsKeyCompareStringAdapted) {
      return true;
    }

    // Reduce the three-way comparison of keys a and b to -1/0/1.
    const auto compare = [&](field_type a, field_type b) {
      const absl::weak_ordering cmp =
          compare_internal::do_three_way_comparison(comp, key(a), key(b));
      return cmp < 0 ? -1 : cmp > 0 ? 1 : 0;
    };
    // Walk the node's keys in order; each key compared against key(i) must be
    // non-decreasing, and (for unique-key containers) never equal to it.
    int cmp = -1;
    constexpr bool kCanHaveEquivKeys =
        params_type::template can_have_multiple_equivalent_keys<key_type>();
    for (field_type j = start(); j < finish(); ++j) {
      if (j == i) {
        // Once we pass position i, all earlier keys must have compared <= 0.
        if (cmp > 0) return false;
        continue;
      }
      int new_cmp = compare(j, i);
      if (new_cmp < cmp || (!kCanHaveEquivKeys && new_cmp == 0)) return false;
      cmp = new_cmp;
    }
    return true;
  }
    924 
  // Emplaces a value at position i, shifting all existing values and
  // children at positions >= i to the right by 1.
  template <typename... Args>
  void emplace_value(field_type i, allocator_type *alloc, Args &&...args);

  // Removes the values at positions [i, i + to_erase), shifting all existing
  // values and children after that range to the left by to_erase. Clears all
  // children between [i, i + to_erase).
  void remove_values(field_type i, field_type to_erase, allocator_type *alloc);

  // Rebalances a node with its right sibling, moving `to_move` values in the
  // indicated direction. (Definitions are out of line.)
  void rebalance_right_to_left(field_type to_move, btree_node *right,
                               allocator_type *alloc);
  void rebalance_left_to_right(field_type to_move, btree_node *right,
                               allocator_type *alloc);

  // Splits a node, moving a portion of the node's values to its right sibling.
  void split(int insert_position, btree_node *dest, allocator_type *alloc);

  // Merges a node with its right sibling, moving all of the values and the
  // delimiting key in the parent node onto itself, and deleting the src node.
  void merge(btree_node *src, allocator_type *alloc);
    947 
  // Node allocation/deletion routines.

  // Initializes the header fields of a freshly allocated leaf node and poisons
  // its (still-unconstructed) slot storage for the sanitizer.
  void init_leaf(field_type position, field_type max_count,
                 btree_node *parent) {
    set_generation(0);
    set_parent(parent);
    set_position(position);
    set_start(0);
    set_finish(0);
    set_max_count(max_count);
    absl::container_internal::SanitizerPoisonMemoryRegion(
        start_slot(), max_count * sizeof(slot_type));
  }
  // Initializes an internal node: same header setup as a leaf, then the
  // sentinel max_count marks it internal and the child array is poisoned.
  void init_internal(field_type position, btree_node *parent) {
    init_leaf(position, kNodeSlots, parent);
    // Set `max_count` to a sentinel value to indicate that this node is
    // internal.
    set_max_count(kInternalNodeMaxCount);
    absl::container_internal::SanitizerPoisonMemoryRegion(
        &mutable_child(start()), (kNodeSlots + 1) * sizeof(btree_node *));
  }
    968 
  // Returns `size` bytes of node storage to the allocator. The region is
  // unpoisoned first so the sanitizer allows the deallocation to touch it.
  static void deallocate(const size_type size, btree_node *node,
                         allocator_type *alloc) {
    absl::container_internal::SanitizerUnpoisonMemoryRegion(node, size);
    absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
  }

  // Deletes a node and all of its children.
  static void clear_and_delete(btree_node *node, allocator_type *alloc);
    977 
 private:
  // Constructs a value in slot `i`. Bumps the tree generation (iterator
  // invalidation) and unpoisons the slot before construction.
  template <typename... Args>
  void value_init(const field_type i, allocator_type *alloc, Args &&...args) {
    next_generation();
    absl::container_internal::SanitizerUnpoisonObject(slot(i));
    params_type::construct(alloc, slot(i), std::forward<Args>(args)...);
  }
  // Destroys the value in slot `i` and re-poisons the slot.
  void value_destroy(const field_type i, allocator_type *alloc) {
    next_generation();
    params_type::destroy(alloc, slot(i));
    absl::container_internal::SanitizerPoisonObject(slot(i));
  }
  // Destroys the `n` values in slots [i, i + n), poisoning each as it goes.
  void value_destroy_n(const field_type i, const field_type n,
                       allocator_type *alloc) {
    next_generation();
    for (slot_type *s = slot(i), *end = slot(i + n); s != end; ++s) {
      params_type::destroy(alloc, s);
      absl::container_internal::SanitizerPoisonObject(s);
    }
  }
    998 
  // Moves the value in `src` into `dest`, updating sanitizer poisoning so the
  // destination becomes live and the vacated source becomes poisoned.
  static void transfer(slot_type *dest, slot_type *src, allocator_type *alloc) {
    absl::container_internal::SanitizerUnpoisonObject(dest);
    params_type::transfer(alloc, dest, src);
    absl::container_internal::SanitizerPoisonObject(src);
  }

  // Transfers value from slot `src_i` in `src_node` to slot `dest_i` in `this`.
  void transfer(const size_type dest_i, const size_type src_i,
                btree_node *src_node, allocator_type *alloc) {
    next_generation();
    transfer(slot(dest_i), src_node->slot(src_i), alloc);
  }

  // Transfers `n` values starting at value `src_i` in `src_node` into the
  // values starting at value `dest_i` in `this`. Copies front-to-back, so
  // overlapping ranges are only safe when dest precedes src.
  void transfer_n(const size_type n, const size_type dest_i,
                  const size_type src_i, btree_node *src_node,
                  allocator_type *alloc) {
    next_generation();
    for (slot_type *src = src_node->slot(src_i), *end = src + n,
                   *dest = slot(dest_i);
         src != end; ++src, ++dest) {
      transfer(dest, src, alloc);
    }
  }

  // Same as above, except that we start at the end and work our way to the
  // beginning.
  void transfer_n_backward(const size_type n, const size_type dest_i,
                           const size_type src_i, btree_node *src_node,
                           allocator_type *alloc) {
    next_generation();
    for (slot_type *src = src_node->slot(src_i + n), *end = src - n,
                   *dest = slot(dest_i + n);
         src != end; --src, --dest) {
      // If we modified the loop index calculations above to avoid the -1s here,
      // it would result in UB in the computation of `end` (and possibly `src`
      // as well, if n == 0), since slot() is effectively an array index and it
      // is UB to compute the address of any out-of-bounds array element except
      // for one-past-the-end.
      transfer(dest - 1, src - 1, alloc);
    }
  }
   1042 
  // Friends that need direct access to the node's private layout accessors.
  template <typename P>
  friend class btree;
  template <typename N, typename R, typename P>
  friend class btree_iterator;
  friend class BtreeNodePeer;
  friend struct btree_access;
};
   1050 
   1051 template <typename Node>
   1052 bool AreNodesFromSameContainer(const Node *node_a, const Node *node_b) {
   1053  // If either node is null, then give up on checking whether they're from the
   1054  // same container. (If exactly one is null, then we'll trigger the
   1055  // default-constructed assert in Equals.)
   1056  if (node_a == nullptr || node_b == nullptr) return true;
   1057  while (!node_a->is_root()) node_a = node_a->parent();
   1058  while (!node_b->is_root()) node_b = node_b->parent();
   1059  return node_a == node_b;
   1060 }
   1061 
   1062 class btree_iterator_generation_info_enabled {
   1063 public:
   1064  explicit btree_iterator_generation_info_enabled(uint32_t g)
   1065      : generation_(g) {}
   1066 
   1067  // Updates the generation. For use internally right before we return an
   1068  // iterator to the user.
   1069  template <typename Node>
   1070  void update_generation(const Node *node) {
   1071    if (node != nullptr) generation_ = node->generation();
   1072  }
   1073  uint32_t generation() const { return generation_; }
   1074 
   1075  template <typename Node>
   1076  void assert_valid_generation(const Node *node) const {
   1077    if (node != nullptr && node->generation() != generation_) {
   1078      ABSL_INTERNAL_LOG(
   1079          FATAL,
   1080          "Attempting to use an invalidated iterator. The corresponding b-tree "
   1081          "container has been mutated since this iterator was constructed.");
   1082    }
   1083  }
   1084 
   1085 private:
   1086  // Used to check that the iterator hasn't been invalidated.
   1087  uint32_t generation_;
   1088 };
   1089 
   1090 class btree_iterator_generation_info_disabled {
   1091 public:
   1092  explicit btree_iterator_generation_info_disabled(uint32_t) {}
   1093  static void update_generation(const void *) {}
   1094  static uint32_t generation() { return 0; }
   1095  static void assert_valid_generation(const void *) {}
   1096 };
   1097 
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
// Generation checking compiled in: iterators carry and validate a generation.
using btree_iterator_generation_info = btree_iterator_generation_info_enabled;
#else
// Generation checking compiled out: all checks are zero-cost no-ops.
using btree_iterator_generation_info = btree_iterator_generation_info_disabled;
#endif
   1103 
// Bidirectional iterator over a btree. `Node` is const-qualified for
// const_iterators; `Reference`/`Pointer` are what operator*/operator-> yield.
template <typename Node, typename Reference, typename Pointer>
class btree_iterator : private btree_iterator_generation_info {
  using field_type = typename Node::field_type;
  using key_type = typename Node::key_type;
  using size_type = typename Node::size_type;
  using params_type = typename Node::params_type;
  using is_map_container = typename params_type::is_map_container;

  using node_type = Node;
  using normal_node = typename std::remove_const<Node>::type;
  using const_node = const Node;
  using normal_pointer = typename params_type::pointer;
  using normal_reference = typename params_type::reference;
  using const_pointer = typename params_type::const_pointer;
  using const_reference = typename params_type::const_reference;
  using slot_type = typename params_type::slot_type;

  // In sets, all iterators are const.
  using iterator = absl::conditional_t<
      is_map_container::value,
      btree_iterator<normal_node, normal_reference, normal_pointer>,
      btree_iterator<normal_node, const_reference, const_pointer>>;
  using const_iterator =
      btree_iterator<const_node, const_reference, const_pointer>;

 public:
  // These aliases are public for std::iterator_traits.
  using difference_type = typename Node::difference_type;
  using value_type = typename params_type::value_type;
  using pointer = Pointer;
  using reference = Reference;
  using iterator_category = std::bidirectional_iterator_tag;

  // A default-constructed iterator has a null node and position -1.
  btree_iterator() : btree_iterator(nullptr, -1) {}
  explicit btree_iterator(Node *n) : btree_iterator(n, n->start()) {}
  btree_iterator(Node *n, int p)
      : btree_iterator_generation_info(n != nullptr ? n->generation()
                                                    : ~uint32_t{}),
        node_(n),
        position_(p) {}

  // NOTE: this SFINAE allows for implicit conversions from iterator to
  // const_iterator, but it specifically avoids hiding the copy constructor so
  // that the trivial one will be used when possible.
  template <typename N, typename R, typename P,
            absl::enable_if_t<
                std::is_same<btree_iterator<N, R, P>, iterator>::value &&
                    std::is_same<btree_iterator, const_iterator>::value,
                int> = 0>
  btree_iterator(const btree_iterator<N, R, P> other)  // NOLINT
      : btree_iterator_generation_info(other),
        node_(other.node_),
        position_(other.position_) {}

  // Equality is defined against both iterator and const_iterator so mixed
  // comparisons work; both funnel through Equals().
  bool operator==(const iterator &other) const {
    return Equals(other);
  }
  bool operator==(const const_iterator &other) const {
    return Equals(other);
  }
  bool operator!=(const iterator &other) const {
    return !Equals(other);
  }
  bool operator!=(const const_iterator &other) const {
    return !Equals(other);
  }

  // Returns n such that n calls to ++other yields *this.
  // Precondition: n exists.
  difference_type operator-(const_iterator other) const {
    if (node_ == other.node_) {
      // Fast paths: same leaf node, or same position in the same node.
      if (node_->is_leaf()) return position_ - other.position_;
      if (position_ == other.position_) return 0;
    }
    return distance_slow(other);
  }

  // Advances the iterator by `n`. Values of `n` must not result in going past
  // the `end` iterator (for a positive `n`) or before the `begin` iterator (for
  // a negative `n`).
  btree_iterator &operator+=(difference_type n) {
    assert_valid_generation(node_);
    if (n == 0) return *this;
    if (n < 0) return decrement_n_slow(-n);
    return increment_n_slow(n);
  }

  // Moves the iterator by `n` positions backwards. Values of `n` must not
  // result in going before the `begin` iterator (for a positive `n`) or past
  // the `end` iterator (for a negative `n`).
  btree_iterator &operator-=(difference_type n) {
    assert_valid_generation(node_);
    if (n == 0) return *this;
    if (n < 0) return increment_n_slow(-n);
    return decrement_n_slow(n);
  }

  // Accessors for the key/value the iterator is pointing at.
  reference operator*() const {
    ABSL_HARDENING_ASSERT(node_ != nullptr);
    assert_valid_generation(node_);
    ABSL_HARDENING_ASSERT(position_ >= node_->start());
    if (position_ >= node_->finish()) {
      // Distinguish "dereferencing end()" from other out-of-bounds positions
      // for a clearer hardening failure message.
      ABSL_HARDENING_ASSERT(!IsEndIterator() && "Dereferencing end() iterator");
      ABSL_HARDENING_ASSERT(position_ < node_->finish());
    }
    return node_->value(static_cast<field_type>(position_));
  }
  pointer operator->() const { return &operator*(); }

  btree_iterator &operator++() {
    increment();
    return *this;
  }
  btree_iterator &operator--() {
    decrement();
    return *this;
  }
  btree_iterator operator++(int) {
    btree_iterator tmp = *this;
    ++*this;
    return tmp;
  }
  btree_iterator operator--(int) {
    btree_iterator tmp = *this;
    --*this;
    return tmp;
  }

 private:
  friend iterator;
  friend const_iterator;
  template <typename Params>
  friend class btree;
  template <typename Tree>
  friend class btree_container;
  template <typename Tree>
  friend class btree_set_container;
  template <typename Tree>
  friend class btree_map_container;
  template <typename Tree>
  friend class btree_multiset_container;
  template <typename TreeType, typename CheckerType>
  friend class base_checker;
  friend struct btree_access;

  // This SFINAE allows explicit conversions from const_iterator to
  // iterator, but also avoids hiding the copy constructor.
  // NOTE: the const_cast is safe because this constructor is only called by
  // non-const methods and the container owns the nodes.
  template <typename N, typename R, typename P,
            absl::enable_if_t<
                std::is_same<btree_iterator<N, R, P>, const_iterator>::value &&
                    std::is_same<btree_iterator, iterator>::value,
                int> = 0>
  explicit btree_iterator(const btree_iterator<N, R, P> other)
      : btree_iterator_generation_info(other.generation()),
        node_(const_cast<node_type *>(other.node_)),
        position_(other.position_) {}

  // Returns whether the two iterators point at the same (node, position),
  // after validating that the comparison is meaningful.
  bool Equals(const const_iterator other) const {
    ABSL_HARDENING_ASSERT(((node_ == nullptr && other.node_ == nullptr) ||
                           (node_ != nullptr && other.node_ != nullptr)) &&
                          "Comparing default-constructed iterator with "
                          "non-default-constructed iterator.");
    // Note: we use assert instead of ABSL_HARDENING_ASSERT here because this
    // changes the complexity of Equals from O(1) to O(log(N) + log(M)) where
    // N/M are sizes of the containers containing node_/other.node_.
    assert(AreNodesFromSameContainer(node_, other.node_) &&
           "Comparing iterators from different containers.");
    assert_valid_generation(node_);
    other.assert_valid_generation(other.node_);
    return node_ == other.node_ && position_ == other.position_;
  }

  // Returns whether this iterator is the container's end(): positioned at
  // finish() of its node and on the rightmost path up to the root.
  bool IsEndIterator() const {
    if (position_ != node_->finish()) return false;
    node_type *node = node_;
    while (!node->is_root()) {
      if (node->position() != node->parent()->finish()) return false;
      node = node->parent();
    }
    return true;
  }

  // Returns n such that n calls to ++other yields *this.
  // Precondition: n exists && (this->node_ != other.node_ ||
  // !this->node_->is_leaf() || this->position_ != other.position_).
  difference_type distance_slow(const_iterator other) const;

  // Increment/decrement the iterator.
  void increment() {
    assert_valid_generation(node_);
    // Fast path: stepping within a leaf node; otherwise fall back to the
    // out-of-line tree walk.
    if (node_->is_leaf() && ++position_ < node_->finish()) {
      return;
    }
    increment_slow();
  }
  void increment_slow();
  btree_iterator &increment_n_slow(difference_type n);

  void decrement() {
    assert_valid_generation(node_);
    // Fast path: stepping within a leaf node.
    if (node_->is_leaf() && --position_ >= node_->start()) {
      return;
    }
    decrement_slow();
  }
  void decrement_slow();
  btree_iterator &decrement_n_slow(difference_type n);

  // Key/slot of the value currently pointed at.
  const key_type &key() const {
    return node_->key(static_cast<size_type>(position_));
  }
  decltype(std::declval<Node *>()->slot(0)) slot() {
    return node_->slot(static_cast<size_type>(position_));
  }

  void update_generation() {
    btree_iterator_generation_info::update_generation(node_);
  }

  // The node in the tree the iterator is pointing at.
  Node *node_;
  // The position within the node of the tree the iterator is pointing at.
  // NOTE: this is an int rather than a field_type because iterators can point
  // to invalid positions (such as -1) in certain circumstances.
  int position_;
};
   1333 
// The core b-tree engine. Stores multiple values per node (see the file
// header); the container wrappers (set/map interfaces) are layered on top of
// this class. `Params` supplies the key/value types, comparator, allocator
// and node-layout policy.
template <typename Params>
class btree {
  using node_type = btree_node<Params>;
  using is_key_compare_to = typename Params::is_key_compare_to;
  using field_type = typename node_type::field_type;

  // We use a static empty node for the root/leftmost/rightmost of empty btrees
  // in order to avoid branching in begin()/end().
  struct EmptyNodeType : node_type {
    using field_type = typename node_type::field_type;
    node_type *parent;
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    uint32_t generation = 0;
#endif
    field_type position = 0;
    field_type start = 0;
    field_type finish = 0;
    // max_count must be != kInternalNodeMaxCount (so that this node is regarded
    // as a leaf node). max_count() is never called when the tree is empty.
    field_type max_count = node_type::kInternalNodeMaxCount + 1;

    constexpr EmptyNodeType() : parent(this) {}
  };

  static node_type *EmptyNode() {
    alignas(node_type::Alignment()) static constexpr EmptyNodeType empty_node;
    // NOTE(review): const_cast from a constexpr object — presumably the empty
    // node is never written through this pointer; writing to it would be UB.
    return const_cast<EmptyNodeType *>(&empty_node);
  }

  enum : uint32_t {
    kNodeSlots = node_type::kNodeSlots,
    kMinNodeValues = kNodeSlots / 2,
  };

  // Leaf/internal node counts returned by internal_stats().
  struct node_stats {
    using size_type = typename Params::size_type;

    node_stats(size_type l, size_type i) : leaf_nodes(l), internal_nodes(i) {}

    node_stats &operator+=(const node_stats &other) {
      leaf_nodes += other.leaf_nodes;
      internal_nodes += other.internal_nodes;
      return *this;
    }

    size_type leaf_nodes;
    size_type internal_nodes;
  };

 public:
  using key_type = typename Params::key_type;
  using value_type = typename Params::value_type;
  using size_type = typename Params::size_type;
  using difference_type = typename Params::difference_type;
  using key_compare = typename Params::key_compare;
  using original_key_compare = typename Params::original_key_compare;
  using value_compare = typename Params::value_compare;
  using allocator_type = typename Params::allocator_type;
  using reference = typename Params::reference;
  using const_reference = typename Params::const_reference;
  using pointer = typename Params::pointer;
  using const_pointer = typename Params::const_pointer;
  using iterator =
      typename btree_iterator<node_type, reference, pointer>::iterator;
  using const_iterator = typename iterator::const_iterator;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
  using node_handle_type = node_handle<Params, Params, allocator_type>;

  // Internal types made public for use by btree_container types.
  using params_type = Params;
  using slot_type = typename Params::slot_type;

 private:
  // Copies or moves (depending on the template parameter) the values in
  // other into this btree in their order in other. This btree must be empty
  // before this method is called. This method is used in copy construction,
  // copy assignment, and move assignment.
  template <typename Btree>
  void copy_or_move_values_in_order(Btree &other);

  // Validates that various assumptions/requirements are true at compile time.
  constexpr static bool static_assert_validation();

 public:
  btree(const key_compare &comp, const allocator_type &alloc)
      : root_(EmptyNode()), rightmost_(comp, alloc, EmptyNode()), size_(0) {}

  btree(const btree &other) : btree(other, other.allocator()) {}
  btree(const btree &other, const allocator_type &alloc)
      : btree(other.key_comp(), alloc) {
    copy_or_move_values_in_order(other);
  }
  // Move construction steals other's nodes and leaves other empty (pointing
  // at the static EmptyNode).
  btree(btree &&other) noexcept
      : root_(std::exchange(other.root_, EmptyNode())),
        rightmost_(std::move(other.rightmost_)),
        size_(std::exchange(other.size_, 0u)) {
    other.mutable_rightmost() = EmptyNode();
  }
  btree(btree &&other, const allocator_type &alloc)
      : btree(other.key_comp(), alloc) {
    if (alloc == other.allocator()) {
      swap(other);
    } else {
      // Move values from `other` one at a time when allocators are different.
      copy_or_move_values_in_order(other);
    }
  }

  ~btree() {
    // Put static_asserts in destructor to avoid triggering them before the type
    // is complete.
    static_assert(static_assert_validation(), "This call must be elided.");
    clear();
  }

  // Assign the contents of other to *this.
  btree &operator=(const btree &other);
  btree &operator=(btree &&other) noexcept;

  iterator begin() { return iterator(leftmost()); }
  const_iterator begin() const { return const_iterator(leftmost()); }
  iterator end() { return iterator(rightmost(), rightmost()->finish()); }
  const_iterator end() const {
    return const_iterator(rightmost(), rightmost()->finish());
  }
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Finds the first element whose key is not less than `key`.
  template <typename K>
  iterator lower_bound(const K &key) {
    return internal_end(internal_lower_bound(key).value);
  }
  template <typename K>
  const_iterator lower_bound(const K &key) const {
    return internal_end(internal_lower_bound(key).value);
  }

  // Finds the first element whose key is not less than `key` and also returns
  // whether that element is equal to `key`.
  template <typename K>
  std::pair<iterator, bool> lower_bound_equal(const K &key) const;

  // Finds the first element whose key is greater than `key`.
  template <typename K>
  iterator upper_bound(const K &key) {
    return internal_end(internal_upper_bound(key));
  }
  template <typename K>
  const_iterator upper_bound(const K &key) const {
    return internal_end(internal_upper_bound(key));
  }

  // Finds the range of values which compare equal to key. The first member of
  // the returned pair is equal to lower_bound(key). The second member of the
  // pair is equal to upper_bound(key).
  template <typename K>
  std::pair<iterator, iterator> equal_range(const K &key);
  template <typename K>
  std::pair<const_iterator, const_iterator> equal_range(const K &key) const {
    return const_cast<btree *>(this)->equal_range(key);
  }

  // Inserts a value into the btree only if it does not already exist. The
  // boolean return value indicates whether insertion succeeded or failed.
  // Requirement: if `key` already exists in the btree, does not consume `args`.
  // Requirement: `key` is never referenced after consuming `args`.
  template <typename K, typename... Args>
  std::pair<iterator, bool> insert_unique(const K &key, Args &&...args);

  // Inserts with hint. Checks to see if the value should be placed immediately
  // before `position` in the tree. If so, then the insertion will take
  // amortized constant time. If not, the insertion will take amortized
  // logarithmic time as if a call to insert_unique() were made.
  // Requirement: if `key` already exists in the btree, does not consume `args`.
  // Requirement: `key` is never referenced after consuming `args`.
  template <typename K, typename... Args>
  std::pair<iterator, bool> insert_hint_unique(iterator position, const K &key,
                                               Args &&...args);

  // Insert a range of values into the btree.
  // Note: the first overload avoids constructing a value_type if the key
  // already exists in the btree.
  template <typename InputIterator,
            typename = decltype(std::declval<const key_compare &>()(
                params_type::key(*std::declval<InputIterator>()),
                std::declval<const key_type &>()))>
  void insert_iterator_unique(InputIterator b, InputIterator e, int);
  // We need the second overload for cases in which we need to construct a
  // value_type in order to compare it with the keys already in the btree.
  template <typename InputIterator>
  void insert_iterator_unique(InputIterator b, InputIterator e, char);

  // Inserts a value into the btree.
  template <typename ValueType>
  iterator insert_multi(const key_type &key, ValueType &&v);

  // Inserts a value into the btree.
  template <typename ValueType>
  iterator insert_multi(ValueType &&v) {
    return insert_multi(params_type::key(v), std::forward<ValueType>(v));
  }

  // Insert with hint. Check to see if the value should be placed immediately
  // before position in the tree. If it does, then the insertion will take
  // amortized constant time. If not, the insertion will take amortized
  // logarithmic time as if a call to insert_multi(v) were made.
  template <typename ValueType>
  iterator insert_hint_multi(iterator position, ValueType &&v);

  // Insert a range of values into the btree.
  template <typename InputIterator>
  void insert_iterator_multi(InputIterator b,
                             InputIterator e);

  // Erase the specified iterator from the btree. The iterator must be valid
  // (i.e. not equal to end()).  Return an iterator pointing to the node after
  // the one that was erased (or end() if none exists).
  // Requirement: does not read the value at `*iter`.
  iterator erase(iterator iter);

  // Erases range. Returns the number of keys erased and an iterator pointing
  // to the element after the last erased element.
  std::pair<size_type, iterator> erase_range(iterator begin, iterator end);

  // Finds an element with key equivalent to `key` or returns `end()` if `key`
  // is not present.
  template <typename K>
  iterator find(const K &key) {
    return internal_end(internal_find(key));
  }
  template <typename K>
  const_iterator find(const K &key) const {
    return internal_end(internal_find(key));
  }

  // Clear the btree, deleting all of the values it contains.
  void clear();

  // Swaps the contents of `this` and `other`.
  void swap(btree &other);

  // The comparator is stored in slot 0 of the compressed tuple (see
  // rightmost_ below).
  const key_compare &key_comp() const noexcept {
    return rightmost_.template get<0>();
  }
  template <typename K1, typename K2>
  bool compare_keys(const K1 &a, const K2 &b) const {
    return compare_internal::compare_result_as_less_than(key_comp()(a, b));
  }

  value_compare value_comp() const {
    return value_compare(original_key_compare(key_comp()));
  }

  // Verifies the structure of the btree.
  void verify() const;

  // Size routines.
  size_type size() const { return size_; }
  size_type max_size() const { return (std::numeric_limits<size_type>::max)(); }
  bool empty() const { return size_ == 0; }

  // The height of the btree. An empty tree will have height 0.
  size_type height() const {
    size_type h = 0;
    if (!empty()) {
      // Count the length of the chain from the leftmost node up to the
      // root. We actually count from the root back around to the level below
      // the root, but the calculation is the same because of the circularity
      // of that traversal.
      const node_type *n = root();
      do {
        ++h;
        n = n->parent();
      } while (n != root());
    }
    return h;
  }

  // The number of internal, leaf and total nodes used by the btree.
  size_type leaf_nodes() const { return internal_stats(root()).leaf_nodes; }
  size_type internal_nodes() const {
    return internal_stats(root()).internal_nodes;
  }
  size_type nodes() const {
    node_stats stats = internal_stats(root());
    return stats.leaf_nodes + stats.internal_nodes;
  }

  // The total number of bytes used by the btree.
  // TODO(b/169338300): update to support node_btree_*.
  size_type bytes_used() const {
    node_stats stats = internal_stats(root());
    if (stats.leaf_nodes == 1 && stats.internal_nodes == 0) {
      // A single-leaf tree may be a root leaf with a smaller max_count.
      return sizeof(*this) + node_type::LeafSize(root()->max_count());
    } else {
      return sizeof(*this) + stats.leaf_nodes * node_type::LeafSize() +
             stats.internal_nodes * node_type::InternalSize();
    }
  }

  // The average number of bytes used per value stored in the btree assuming
  // random insertion order.
  static double average_bytes_per_value() {
    // The expected number of values per node with random insertion order is the
    // average of the maximum and minimum numbers of values per node.
    const double expected_values_per_node = (kNodeSlots + kMinNodeValues) / 2.0;
    return node_type::LeafSize() / expected_values_per_node;
  }

  // The fullness of the btree. Computed as the number of elements in the btree
  // divided by the maximum number of elements a tree with the current number
  // of nodes could hold. A value of 1 indicates perfect space
  // utilization. Smaller values indicate space wastage.
  // Returns 0 for empty trees.
  double fullness() const {
    if (empty()) return 0.0;
    return static_cast<double>(size()) / (nodes() * kNodeSlots);
  }
  // The overhead of the btree structure in bytes per node. Computed as the
  // total number of bytes used by the btree minus the number of bytes used for
  // storing elements divided by the number of elements.
  // Returns 0 for empty trees.
  double overhead() const {
    if (empty()) return 0.0;
    return (bytes_used() - size() * sizeof(value_type)) /
           static_cast<double>(size());
  }

  // The allocator used by the btree.
  allocator_type get_allocator() const { return allocator(); }

 private:
  friend struct btree_access;

  // Internal accessor routines.
  node_type *root() { return root_; }
  const node_type *root() const { return root_; }
  node_type *&mutable_root() noexcept { return root_; }
  node_type *rightmost() { return rightmost_.template get<2>(); }
  const node_type *rightmost() const { return rightmost_.template get<2>(); }
  node_type *&mutable_rightmost() noexcept {
    return rightmost_.template get<2>();
  }
  key_compare *mutable_key_comp() noexcept {
    return &rightmost_.template get<0>();
  }

  // The leftmost node is stored as the parent of the root node.
  node_type *leftmost() { return root()->parent(); }
  const node_type *leftmost() const { return root()->parent(); }

  // Allocator routines.
  allocator_type *mutable_allocator() noexcept {
    return &rightmost_.template get<1>();
  }
  const allocator_type &allocator() const noexcept {
    return rightmost_.template get<1>();
  }

  // Allocates a correctly aligned node of at least size bytes using the
  // allocator.
  node_type *allocate(size_type size) {
    return reinterpret_cast<node_type *>(
        absl::container_internal::Allocate<node_type::Alignment()>(
            mutable_allocator(), size));
  }

  // Node creation/deletion routines.
  node_type *new_internal_node(field_type position, node_type *parent) {
    node_type *n = allocate(node_type::InternalSize());
    n->init_internal(position, parent);
    return n;
  }
  node_type *new_leaf_node(field_type position, node_type *parent) {
    node_type *n = allocate(node_type::LeafSize());
    n->init_leaf(position, kNodeSlots, parent);
    return n;
  }
  // A root leaf may have a smaller max_count than a regular leaf; it is its
  // own parent.
  node_type *new_leaf_root_node(field_type max_count) {
    node_type *n = allocate(node_type::LeafSize(max_count));
    n->init_leaf(/*position=*/0, max_count, /*parent=*/n);
    return n;
  }

  // Deletion helper routines.
  iterator rebalance_after_delete(iterator iter);

  // Rebalances or splits the node iter points to.
  void rebalance_or_split(iterator *iter);

  // Merges the values of left, right and the delimiting key on their parent
  // onto left, removing the delimiting key and deleting right.
  void merge_nodes(node_type *left, node_type *right);

  // Tries to merge node with its left or right sibling, and failing that,
  // rebalance with its left or right sibling. Returns true if a merge
  // occurred, at which point it is no longer valid to access node. Returns
  // false if no merging took place.
  bool try_merge_or_rebalance(iterator *iter);

  // Tries to shrink the height of the tree by 1.
  void try_shrink();

  // Maps the internal "not found" sentinel (null node) to end().
  iterator internal_end(iterator iter) {
    return iter.node_ != nullptr ? iter : end();
  }
  const_iterator internal_end(const_iterator iter) const {
    return iter.node_ != nullptr ? iter : end();
  }

  // Emplaces a value into the btree immediately before iter. Requires that
  // key(v) <= iter.key() and (--iter).key() <= key(v).
  template <typename... Args>
  iterator internal_emplace(iterator iter, Args &&...args);

  // Returns an iterator pointing to the first value >= the value "iter" is
  // pointing at. Note that "iter" might be pointing to an invalid location such
  // as iter.position_ == iter.node_->finish(). This routine simply moves iter
  // up in the tree to a valid location. Requires: iter.node_ is non-null.
  template <typename IterType>
  static IterType internal_last(IterType iter);

  // Returns an iterator pointing to the leaf position at which key would
  // reside in the tree, unless there is an exact match - in which case, the
  // result may not be on a leaf. When there's a three-way comparator, we can
  // return whether there was an exact match. This allows the caller to avoid a
  // subsequent comparison to determine if an exact match was made, which is
  // important for keys with expensive comparison, such as strings.
  template <typename K>
  SearchResult<iterator, is_key_compare_to::value> internal_locate(
      const K &key) const;

  // Internal routine which implements lower_bound().
  template <typename K>
  SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
      const K &key) const;

  // Internal routine which implements upper_bound().
  template <typename K>
  iterator internal_upper_bound(const K &key) const;

  // Internal routine which implements find().
  template <typename K>
  iterator internal_find(const K &key) const;

  // Verifies the tree structure of node.
  size_type internal_verify(const node_type *node, const key_type *lo,
                            const key_type *hi) const;

  // Recursively counts the leaf and internal nodes in the subtree rooted at
  // `node`.
  node_stats internal_stats(const node_type *node) const {
    // The root can be a static empty node.
    if (node == nullptr || (node == root() && empty())) {
      return node_stats(0, 0);
    }
    if (node->is_leaf()) {
      return node_stats(1, 0);
    }
    // An internal node with k values has k+1 children: start()..finish().
    node_stats res(0, 1);
    for (int i = node->start(); i <= node->finish(); ++i) {
      res += internal_stats(node->child(i));
    }
    return res;
  }

  node_type *root_;

  // A pointer to the rightmost node. Note that the leftmost node is stored as
  // the root's parent. We use compressed tuple in order to save space because
  // key_compare and allocator_type are usually empty.
  absl::container_internal::CompressedTuple<key_compare, allocator_type,
                                            node_type *>
      rightmost_;

  // Number of values.
  size_type size_;
};
   1818 
   1819 ////
   1820 // btree_node methods
// Constructs a value from `args` in slot `i` of this node. Values in
// [i, finish()) are shifted one slot to the right first, and for internal
// nodes the child pointers after the insertion point are shifted right as
// well, leaving child(i + 1) cleared for the caller to fill in.
template <typename P>
template <typename... Args>
inline void btree_node<P>::emplace_value(const field_type i,
                                         allocator_type *alloc,
                                         Args &&...args) {
  assert(i >= start());
  assert(i <= finish());
  // Shift old values to create space for new value and then construct it in
  // place.
  if (i < finish()) {
    transfer_n_backward(finish() - i, /*dest_i=*/i + 1, /*src_i=*/i, this,
                        alloc);
  }
  value_init(static_cast<field_type>(i), alloc, std::forward<Args>(args)...);
  set_finish(finish() + 1);

  if (is_internal() && finish() > i + 1) {
    // Shift the child pointers after the new value one slot to the right,
    // iterating from the back so nothing is overwritten.
    for (field_type j = finish(); j > i + 1; --j) {
      set_child(j, child(j - 1));
    }
    clear_child(i + 1);
  }
}
   1844 
// Removes `to_erase` values starting at slot `i`: the removed values are
// destroyed, the values after them are shifted left into place, and for
// internal nodes the subtrees between the removed values are deleted and the
// remaining child pointers are rotated left.
template <typename P>
inline void btree_node<P>::remove_values(const field_type i,
                                         const field_type to_erase,
                                         allocator_type *alloc) {
  // Transfer values after the removed range into their new places.
  value_destroy_n(i, to_erase, alloc);
  const field_type orig_finish = finish();
  const field_type src_i = i + to_erase;
  transfer_n(orig_finish - src_i, i, src_i, this, alloc);

  if (is_internal()) {
    // Delete all children between begin and end.
    for (field_type j = 0; j < to_erase; ++j) {
      clear_and_delete(child(i + j + 1), alloc);
    }
    // Rotate children after end into new positions.
    for (field_type j = i + to_erase + 1; j <= orig_finish; ++j) {
      set_child(j - to_erase, child(j));
      clear_child(j);
    }
  }
  set_finish(orig_finish - to_erase);
}
   1868 
// Moves `to_move` values from `right` (the next sibling) into this node,
// rotating through the delimiting value stored in the parent: the parent's
// delimiter comes down into this node and a new delimiter from `right` goes
// up into the parent. Child pointers move along with the values for internal
// nodes.
template <typename P>
void btree_node<P>::rebalance_right_to_left(field_type to_move,
                                            btree_node *right,
                                            allocator_type *alloc) {
  assert(parent() == right->parent());
  assert(position() + 1 == right->position());
  assert(right->count() >= count());
  assert(to_move >= 1);
  assert(to_move <= right->count());

  // 1) Move the delimiting value in the parent to the left node.
  transfer(finish(), position(), parent(), alloc);

  // 2) Move the (to_move - 1) values from the right node to the left node.
  transfer_n(to_move - 1, finish() + 1, right->start(), right, alloc);

  // 3) Move the new delimiting value to the parent from the right node.
  parent()->transfer(position(), right->start() + to_move - 1, right, alloc);

  // 4) Shift the values in the right node to their correct positions.
  right->transfer_n(right->count() - to_move, right->start(),
                    right->start() + to_move, right, alloc);

  if (is_internal()) {
    // Move the child pointers from the right to the left node.
    for (field_type i = 0; i < to_move; ++i) {
      init_child(finish() + i + 1, right->child(i));
    }
    for (field_type i = right->start(); i <= right->finish() - to_move; ++i) {
      assert(i + to_move <= right->max_count());
      right->init_child(i, right->child(i + to_move));
      right->clear_child(i + to_move);
    }
  }

  // Fixup `finish` on the left and right nodes.
  set_finish(finish() + to_move);
  right->set_finish(right->finish() - to_move);
}
   1908 
// Mirror of rebalance_right_to_left: moves `to_move` values from this node
// into `right` (the next sibling), rotating through the delimiting value in
// the parent. Child pointers move along with the values for internal nodes.
template <typename P>
void btree_node<P>::rebalance_left_to_right(field_type to_move,
                                            btree_node *right,
                                            allocator_type *alloc) {
  assert(parent() == right->parent());
  assert(position() + 1 == right->position());
  assert(count() >= right->count());
  assert(to_move >= 1);
  assert(to_move <= count());

  // Values in the right node are shifted to the right to make room for the
  // new to_move values. Then, the delimiting value in the parent and the
  // other (to_move - 1) values in the left node are moved into the right node.
  // Lastly, a new delimiting value is moved from the left node into the
  // parent, and the remaining empty left node entries are destroyed.

  // 1) Shift existing values in the right node to their correct positions.
  right->transfer_n_backward(right->count(), right->start() + to_move,
                             right->start(), right, alloc);

  // 2) Move the delimiting value in the parent to the right node.
  right->transfer(right->start() + to_move - 1, position(), parent(), alloc);

  // 3) Move the (to_move - 1) values from the left node to the right node.
  right->transfer_n(to_move - 1, right->start(), finish() - (to_move - 1), this,
                    alloc);

  // 4) Move the new delimiting value to the parent from the left node.
  parent()->transfer(position(), finish() - to_move, this, alloc);

  if (is_internal()) {
    // Move the child pointers from the left to the right node. First shift
    // right's own children out of the way (iterating from the back), then
    // adopt the last `to_move` children of the left node.
    for (field_type i = right->finish() + 1; i > right->start(); --i) {
      right->init_child(i - 1 + to_move, right->child(i - 1));
      right->clear_child(i - 1);
    }
    for (field_type i = 1; i <= to_move; ++i) {
      right->init_child(i - 1, child(finish() - to_move + i));
      clear_child(finish() - to_move + i);
    }
  }

  // Fixup the counts on the left and right nodes.
  set_finish(finish() - to_move);
  right->set_finish(right->finish() + to_move);
}
   1955 
// Splits this full node, moving a portion of its values (chosen based on
// `insert_position` to bias towards where the new value will go) into `dest`,
// an empty node that becomes the new right sibling. The largest remaining
// value of this node is pushed up into the parent as the split key.
template <typename P>
void btree_node<P>::split(const int insert_position, btree_node *dest,
                          allocator_type *alloc) {
  assert(dest->count() == 0);
  assert(max_count() == kNodeSlots);
  assert(position() + 1 == dest->position());
  assert(parent() == dest->parent());

  // We bias the split based on the position being inserted. If we're
  // inserting at the beginning of the left node then bias the split to put
  // more values on the right node. If we're inserting at the end of the
  // right node then bias the split to put more values on the left node.
  if (insert_position == start()) {
    dest->set_finish(dest->start() + finish() - 1);
  } else if (insert_position == kNodeSlots) {
    dest->set_finish(dest->start());
  } else {
    dest->set_finish(dest->start() + count() / 2);
  }
  set_finish(finish() - dest->count());
  assert(count() >= 1);

  // Move values from the left sibling to the right sibling.
  dest->transfer_n(dest->count(), dest->start(), finish(), this, alloc);

  // The split key is the largest value in the left sibling.
  --mutable_finish();
  parent()->emplace_value(position(), alloc, finish_slot());
  value_destroy(finish(), alloc);
  parent()->set_child_noupdate_position(position() + 1, dest);

  if (is_internal()) {
    // Hand the children after the split point over to dest.
    for (field_type i = dest->start(), j = finish() + 1; i <= dest->finish();
         ++i, ++j) {
      assert(child(j) != nullptr);
      dest->init_child(i, child(j));
      clear_child(j);
    }
  }
}
   1996 
   1997 template <typename P>
   1998 void btree_node<P>::merge(btree_node *src, allocator_type *alloc) {
   1999  assert(parent() == src->parent());
   2000  assert(position() + 1 == src->position());
   2001 
   2002  // Move the delimiting value to the left node.
   2003  value_init(finish(), alloc, parent()->slot(position()));
   2004 
   2005  // Move the values from the right to the left node.
   2006  transfer_n(src->count(), finish() + 1, src->start(), src, alloc);
   2007 
   2008  if (is_internal()) {
   2009    // Move the child pointers from the right to the left node.
   2010    for (field_type i = src->start(), j = finish() + 1; i <= src->finish();
   2011         ++i, ++j) {
   2012      init_child(j, src->child(i));
   2013      src->clear_child(i);
   2014    }
   2015  }
   2016 
   2017  // Fixup `finish` on the src and dest nodes.
   2018  set_finish(start() + 1 + count() + src->count());
   2019  src->set_finish(src->start());
   2020 
   2021  // Remove the value on the parent node and delete the src node.
   2022  parent()->remove_values(position(), /*to_erase=*/1, alloc);
   2023 }
   2024 
// Destroys every value in the subtree rooted at `node` and deallocates all of
// its nodes. Implemented iteratively (delete leaves left-to-right, then walk
// up and delete parents) rather than recursively, so arbitrarily tall trees
// don't risk deep recursion.
template <typename P>
void btree_node<P>::clear_and_delete(btree_node *node, allocator_type *alloc) {
  // Fast path: a single leaf.
  if (node->is_leaf()) {
    node->value_destroy_n(node->start(), node->count(), alloc);
    deallocate(LeafSize(node->max_count()), node, alloc);
    return;
  }
  // Fast path: an empty internal node has no values or live children.
  if (node->count() == 0) {
    deallocate(InternalSize(), node, alloc);
    return;
  }

  // The parent of the root of the subtree we are deleting.
  btree_node *delete_root_parent = node->parent();

  // Navigate to the leftmost leaf under node, and then delete upwards.
  while (node->is_internal()) node = node->start_child();
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
  // When generations are enabled, we delete the leftmost leaf last in case it's
  // the parent of the root and we need to check whether it's a leaf before we
  // can update the root's generation.
  // TODO(ezb): if we change btree_node::is_root to check a bool inside the node
  // instead of checking whether the parent is a leaf, we can remove this logic.
  btree_node *leftmost_leaf = node;
#endif
  // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`,
  // which isn't guaranteed to be a valid `field_type`.
  size_type pos = node->position();
  btree_node *parent = node->parent();
  for (;;) {
    // In each iteration of the next loop, we delete one leaf node and go right.
    assert(pos <= parent->finish());
    do {
      node = parent->child(static_cast<field_type>(pos));
      if (node->is_internal()) {
        // Navigate to the leftmost leaf under node.
        while (node->is_internal()) node = node->start_child();
        pos = node->position();
        parent = node->parent();
      }
      node->value_destroy_n(node->start(), node->count(), alloc);
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
      if (leftmost_leaf != node)
#endif
        deallocate(LeafSize(node->max_count()), node, alloc);
      ++pos;
    } while (pos <= parent->finish());

    // Once we've deleted all children of parent, delete parent and go up/right.
    assert(pos > parent->finish());
    do {
      node = parent;
      pos = node->position();
      parent = node->parent();
      node->value_destroy_n(node->start(), node->count(), alloc);
      deallocate(InternalSize(), node, alloc);
      if (parent == delete_root_parent) {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
        // All other nodes are gone; now it is safe to free the deferred
        // leftmost leaf (see comment above).
        deallocate(LeafSize(leftmost_leaf->max_count()), leftmost_leaf, alloc);
#endif
        return;
      }
      ++pos;
    } while (pos > parent->finish());
  }
}
   2091 
   2092 ////
   2093 // btree_iterator methods
   2094 
   2095 // Note: the implementation here is based on btree_node::clear_and_delete.
// Computes `*this - other`, i.e. the number of increments needed to advance
// `other` until it equals `*this`, by walking leaf nodes left-to-right
// starting just past `other`. Linear in the distance between the iterators.
template <typename N, typename R, typename P>
auto btree_iterator<N, R, P>::distance_slow(const_iterator other) const
    -> difference_type {
  const_iterator begin = other;
  const_iterator end = *this;
  // Precondition (checked): the iterators are not identical leaf positions.
  assert(begin.node_ != end.node_ || !begin.node_->is_leaf() ||
         begin.position_ != end.position_);

  const node_type *node = begin.node_;
  // We need to compensate for double counting if begin.node_ is a leaf node.
  difference_type count = node->is_leaf() ? -begin.position_ : 0;

  // First navigate to the leftmost leaf node past begin.
  if (node->is_internal()) {
    ++count;
    node = node->child(begin.position_ + 1);
  }
  while (node->is_internal()) node = node->start_child();

  // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`,
  // which isn't guaranteed to be a valid `field_type`.
  size_type pos = node->position();
  const node_type *parent = node->parent();
  for (;;) {
    // In each iteration of the next loop, we count one leaf node and go right.
    assert(pos <= parent->finish());
    do {
      node = parent->child(static_cast<field_type>(pos));
      if (node->is_internal()) {
        // Navigate to the leftmost leaf under node.
        while (node->is_internal()) node = node->start_child();
        pos = node->position();
        parent = node->parent();
      }
      // Stop as soon as we reach `end`'s node or the parent slot before it.
      if (node == end.node_) return count + end.position_;
      if (parent == end.node_ && pos == static_cast<size_type>(end.position_))
        return count + node->count();
      // +1 is for the next internal node value.
      count += node->count() + 1;
      ++pos;
    } while (pos <= parent->finish());

    // Once we've counted all children of parent, go up/right.
    assert(pos > parent->finish());
    do {
      node = parent;
      pos = node->position();
      parent = node->parent();
      // -1 because we counted the value at end and shouldn't.
      if (parent == end.node_ && pos == static_cast<size_type>(end.position_))
        return count - 1;
      ++pos;
    } while (pos > parent->finish());
  }
}
   2151 
// Slow path of iterator increment: called when `position_` has run off the
// end of a leaf, or when the iterator sits on an internal node (in which case
// the next value is in the leftmost leaf of the subtree to the right).
template <typename N, typename R, typename P>
void btree_iterator<N, R, P>::increment_slow() {
  N* node = node_;
  int position = position_;
  if (node->is_leaf()) {
    assert(position >= node->finish());
    // Climb while we are at the end of a node, stopping at the root.
    while (position == node->finish() && !node->is_root()) {
      assert(node->parent()->child(node->position()) == node);
      position = node->position();
      node = node->parent();
    }
    // TODO(ezb): assert we aren't incrementing end() instead of handling.
    if (position == node->finish()) {
      // Reached the root with no next value: leave the iterator unchanged.
      return;
    }
  } else {
    assert(position < node->finish());
    // Descend into the child just right of the current value, then all the
    // way down its left spine.
    node = node->child(static_cast<field_type>(position + 1));
    while (node->is_internal()) {
      node = node->start_child();
    }
    position = node->start();
  }
  *this = {node, position};
}
   2177 
// Slow path of iterator decrement: called when `position_` has run off the
// start of a leaf, or when the iterator sits on an internal node (in which
// case the previous value is in the rightmost leaf of the left subtree).
template <typename N, typename R, typename P>
void btree_iterator<N, R, P>::decrement_slow() {
  N* node = node_;
  int position = position_;
  if (node->is_leaf()) {
    assert(position <= -1);
    // Climb while we are before the start of a node, stopping at the root.
    while (position < node->start() && !node->is_root()) {
      assert(node->parent()->child(node->position()) == node);
      position = node->position() - 1;
      node = node->parent();
    }
    // TODO(ezb): assert we aren't decrementing begin() instead of handling.
    if (position < node->start()) {
      // Reached the root with no previous value: leave iterator unchanged.
      return;
    }
  } else {
    assert(position >= node->start());
    // Descend into the child left of the current value, then all the way
    // down its right spine.
    node = node->child(static_cast<field_type>(position));
    while (node->is_internal()) {
      node = node->child(node->finish());
    }
    position = node->finish() - 1;
  }
  *this = {node, position};
}
   2203 
// Advances the iterator by `n` (> 0) positions, fast-forwarding through
// whole leaf nodes instead of incrementing one value at a time.
template <typename N, typename R, typename P>
btree_iterator<N, R, P> &btree_iterator<N, R, P>::increment_n_slow(
    difference_type n) {
  N *node = node_;
  int position = position_;
  ABSL_ASSUME(n > 0);
  while (n > 0) {
    if (node->is_leaf()) {
      if (position + n < node->finish()) {
        // The target position is inside this leaf.
        position += n;
        break;
      } else {
        // Consume the rest of this leaf, then climb to the nearest ancestor
        // with a next value.
        n -= node->finish() - position;
        position = node->finish();
        // Remember the end-of-leaf position: if we turn out to be at end(),
        // this is the iterator we must restore.
        btree_iterator save = {node, position};
        while (position == node->finish() && !node->is_root()) {
          position = node->position();
          node = node->parent();
        }
        if (position == node->finish()) {
          // We reached end(); advancing any further is a caller error.
          ABSL_HARDENING_ASSERT(n == 0);
          return *this = save;
        }
      }
    } else {
      // On an internal node: step over one value and descend to the leftmost
      // leaf of the subtree to the right.
      --n;
      assert(position < node->finish());
      node = node->child(static_cast<field_type>(position + 1));
      while (node->is_internal()) {
        node = node->start_child();
      }
      position = node->start();
    }
  }
  node_ = node;
  position_ = position;
  return *this;
}
   2242 
// Moves the iterator back by `n` (> 0) positions, fast-rewinding through
// whole leaf nodes instead of decrementing one value at a time.
template <typename N, typename R, typename P>
btree_iterator<N, R, P> &btree_iterator<N, R, P>::decrement_n_slow(
    difference_type n) {
  N *node = node_;
  int position = position_;
  ABSL_ASSUME(n > 0);
  while (n > 0) {
    if (node->is_leaf()) {
      if (position - n >= node->start()) {
        // The target position is inside this leaf.
        position -= n;
        break;
      } else {
        // Consume the rest of this leaf, then climb to the nearest ancestor
        // with a previous value.
        n -= 1 + position - node->start();
        position = node->start() - 1;
        while (position < node->start() && !node->is_root()) {
          position = node->position() - 1;
          node = node->parent();
        }
        // Decrementing before begin() is a caller error.
        ABSL_HARDENING_ASSERT(position >= node->start());
      }
    } else {
      // On an internal node: step over one value and descend to the rightmost
      // leaf of the subtree to the left.
      --n;
      assert(position >= node->start());
      node = node->child(static_cast<field_type>(position));
      while (node->is_internal()) {
        node = node->child(node->finish());
      }
      position = node->finish() - 1;
    }
  }
  node_ = node;
  position_ = position;
  return *this;
}
   2277 
   2278 ////
   2279 // btree methods
   2280 template <typename P>
   2281 template <typename Btree>
   2282 void btree<P>::copy_or_move_values_in_order(Btree &other) {
   2283  static_assert(std::is_same<btree, Btree>::value ||
   2284                    std::is_same<const btree, Btree>::value,
   2285                "Btree type must be same or const.");
   2286  assert(empty());
   2287 
   2288  // We can avoid key comparisons because we know the order of the
   2289  // values is the same order we'll store them in.
   2290  auto iter = other.begin();
   2291  if (iter == other.end()) return;
   2292  insert_multi(iter.slot());
   2293  ++iter;
   2294  for (; iter != other.end(); ++iter) {
   2295    // If the btree is not empty, we can just insert the new value at the end
   2296    // of the tree.
   2297    internal_emplace(end(), iter.slot());
   2298  }
   2299 }
   2300 
// Compile-time validation of the btree's type-level assumptions. Always
// returns true; the value exists only so this can be used in a static_assert.
template <typename P>
constexpr bool btree<P>::static_assert_validation() {
  static_assert(std::is_nothrow_copy_constructible<key_compare>::value,
                "Key comparison must be nothrow copy constructible");
  static_assert(std::is_nothrow_copy_constructible<allocator_type>::value,
                "Allocator must be nothrow copy constructible");
  static_assert(std::is_trivially_copyable<iterator>::value,
                "iterator not trivially copyable.");

  // Note: We assert that kTargetValues, which is computed from
  // Params::kTargetNodeSize, must fit the node_type::field_type.
  static_assert(
      kNodeSlots < (1 << (8 * sizeof(typename node_type::field_type))),
      "target node size too large");

  // Verify that key_compare returns an absl::{weak,strong}_ordering or bool.
  static_assert(
      compare_has_valid_result_type<key_compare, key_type>(),
      "key comparison function must return absl::{weak,strong}_ordering or "
      "bool.");

  // Test the assumption made in setting kNodeSlotSpace.
  static_assert(node_type::MinimumOverhead() >= sizeof(void *) + 4,
                "node space assumption incorrect");

  return true;
}
   2328 
   2329 template <typename P>
   2330 template <typename K>
   2331 auto btree<P>::lower_bound_equal(const K &key) const
   2332    -> std::pair<iterator, bool> {
   2333  const SearchResult<iterator, is_key_compare_to::value> res =
   2334      internal_lower_bound(key);
   2335  const iterator lower = iterator(internal_end(res.value));
   2336  const bool equal = res.HasMatch()
   2337                         ? res.IsEq()
   2338                         : lower != end() && !compare_keys(key, lower.key());
   2339  return {lower, equal};
   2340 }
   2341 
// Returns the range [lower, upper) of values whose keys are equivalent to
// `key`.
template <typename P>
template <typename K>
auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
  const std::pair<iterator, bool> lower_and_equal = lower_bound_equal(key);
  const iterator lower = lower_and_equal.first;
  if (!lower_and_equal.second) {
    // No equivalent key present: the range is empty.
    return {lower, lower};
  }

  const iterator next = std::next(lower);
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
    // The next iterator after lower must point to a key greater than `key`.
    // Note: if this assert fails, then it may indicate that the comparator does
    // not meet the equivalence requirements for Compare
    // (see https://en.cppreference.com/w/cpp/named_req/Compare).
    assert(next == end() || compare_keys(key, next.key()));
    return {lower, next};
  }
  // Try once more to avoid the call to upper_bound() if there's only one
  // equivalent key. This should prevent all calls to upper_bound() in cases of
  // unique-containers with heterogeneous comparators in which all comparison
  // operators have the same equivalence classes.
  if (next == end() || compare_keys(key, next.key())) return {lower, next};

  // In this case, we need to call upper_bound() to avoid worst case O(N)
  // behavior if we were to iterate over equal keys.
  return {lower, upper_bound(key)};
}
   2370 
// Inserts a value constructed from `args` if no key equivalent to `key` is
// already present. Returns {iterator to the new or existing value, whether an
// insertion happened}.
template <typename P>
template <typename K, typename... Args>
auto btree<P>::insert_unique(const K &key, Args &&...args)
    -> std::pair<iterator, bool> {
  if (empty()) {
    // Lazily allocate the root node on first insertion.
    mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
  }

  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
  iterator iter = res.value;

  if (res.HasMatch()) {
    if (res.IsEq()) {
      // The key already exists in the tree, do nothing.
      return {iter, false};
    }
  } else {
    // Boolean comparator: equality wasn't detected during the search, so
    // compare against the last value not less than `key`.
    iterator last = internal_last(iter);
    if (last.node_ && !compare_keys(key, last.key())) {
      // The key already exists in the tree, do nothing.
      return {last, false};
    }
  }
  return {internal_emplace(iter, std::forward<Args>(args)...), true};
}
   2396 
// Like insert_unique, but first checks whether `position` is a valid hint
// (i.e. the new value belongs immediately before it, or right after it).
// Falls back to a full insert_unique when the hint is wrong.
template <typename P>
template <typename K, typename... Args>
inline auto btree<P>::insert_hint_unique(iterator position, const K &key,
                                         Args &&...args)
    -> std::pair<iterator, bool> {
  if (!empty()) {
    if (position == end() || compare_keys(key, position.key())) {
      if (position == begin() || compare_keys(std::prev(position).key(), key)) {
        // prev.key() < key < position.key()
        return {internal_emplace(position, std::forward<Args>(args)...), true};
      }
    } else if (compare_keys(position.key(), key)) {
      // The hint was too early; also try the slot just after it.
      ++position;
      if (position == end() || compare_keys(key, position.key())) {
        // {original `position`}.key() < key < {current `position`}.key()
        return {internal_emplace(position, std::forward<Args>(args)...), true};
      }
    } else {
      // position.key() == key
      return {position, false};
    }
  }
  return insert_unique(key, std::forward<Args>(args)...);
}
   2421 
   2422 template <typename P>
   2423 template <typename InputIterator, typename>
   2424 void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int) {
   2425  for (; b != e; ++b) {
   2426    insert_hint_unique(end(), params_type::key(*b), *b);
   2427  }
   2428 }
   2429 
   2430 template <typename P>
   2431 template <typename InputIterator>
   2432 void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char) {
   2433  for (; b != e; ++b) {
   2434    // Use a node handle to manage a temp slot.
   2435    auto node_handle =
   2436        CommonAccess::Construct<node_handle_type>(get_allocator(), *b);
   2437    slot_type *slot = CommonAccess::GetSlot(node_handle);
   2438    insert_hint_unique(end(), params_type::key(slot), slot);
   2439  }
   2440 }
   2441 
   2442 template <typename P>
   2443 template <typename ValueType>
   2444 auto btree<P>::insert_multi(const key_type &key, ValueType &&v) -> iterator {
   2445  if (empty()) {
   2446    mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
   2447  }
   2448 
   2449  iterator iter = internal_upper_bound(key);
   2450  if (iter.node_ == nullptr) {
   2451    iter = end();
   2452  }
   2453  return internal_emplace(iter, std::forward<ValueType>(v));
   2454 }
   2455 
// Like insert_multi, but first checks whether `position` is a valid hint
// (i.e. `v` may be placed immediately before it, or right after it). Falls
// back to a full insert_multi when the hint is wrong.
template <typename P>
template <typename ValueType>
auto btree<P>::insert_hint_multi(iterator position, ValueType &&v) -> iterator {
  if (!empty()) {
    const key_type &key = params_type::key(v);
    if (position == end() || !compare_keys(position.key(), key)) {
      if (position == begin() ||
          !compare_keys(key, std::prev(position).key())) {
        // prev.key() <= key <= position.key()
        return internal_emplace(position, std::forward<ValueType>(v));
      }
    } else {
      // The hint was too early; also try the slot just after it.
      ++position;
      if (position == end() || !compare_keys(position.key(), key)) {
        // {original `position`}.key() < key < {current `position`}.key()
        return internal_emplace(position, std::forward<ValueType>(v));
      }
    }
  }
  return insert_multi(std::forward<ValueType>(v));
}
   2477 
   2478 template <typename P>
   2479 template <typename InputIterator>
   2480 void btree<P>::insert_iterator_multi(InputIterator b, InputIterator e) {
   2481  for (; b != e; ++b) {
   2482    insert_hint_multi(end(), *b);
   2483  }
   2484 }
   2485 
   2486 template <typename P>
   2487 auto btree<P>::operator=(const btree &other) -> btree & {
   2488  if (this != &other) {
   2489    clear();
   2490 
   2491    *mutable_key_comp() = other.key_comp();
   2492    if (absl::allocator_traits<
   2493            allocator_type>::propagate_on_container_copy_assignment::value) {
   2494      *mutable_allocator() = other.allocator();
   2495    }
   2496 
   2497    copy_or_move_values_in_order(other);
   2498  }
   2499  return *this;
   2500 }
   2501 
// Move assignment. Steals `other`'s storage when the allocator propagates on
// move assignment or compares equal; otherwise every element must be moved
// individually.
template <typename P>
auto btree<P>::operator=(btree &&other) noexcept -> btree & {
  if (this != &other) {
    clear();

    using std::swap;
    if (absl::allocator_traits<
            allocator_type>::propagate_on_container_move_assignment::value) {
      // Allocator travels with the storage, so a wholesale swap is legal.
      swap(root_, other.root_);
      // Note: `rightmost_` also contains the allocator and the key comparator.
      swap(rightmost_, other.rightmost_);
      swap(size_, other.size_);
    } else {
      if (allocator() == other.allocator()) {
        // Equal allocators: memory is interchangeable, swap everything except
        // the allocator itself.
        swap(mutable_root(), other.mutable_root());
        swap(*mutable_key_comp(), *other.mutable_key_comp());
        swap(mutable_rightmost(), other.mutable_rightmost());
        swap(size_, other.size_);
      } else {
        // We aren't allowed to propagate the allocator and the allocator is
        // different so we can't take over its memory. We must move each element
        // individually. We need both `other` and `this` to have `other`s key
        // comparator while moving the values so we can't swap the key
        // comparators.
        *mutable_key_comp() = other.key_comp();
        copy_or_move_values_in_order(other);
      }
    }
  }
  return *this;
}
   2533 
// Erases the value at `iter` and rebalances the tree. Returns an iterator to
// the value following the erased one.
template <typename P>
auto btree<P>::erase(iterator iter) -> iterator {
  iter.node_->value_destroy(static_cast<field_type>(iter.position_),
                            mutable_allocator());
  iter.update_generation();

  const bool internal_delete = iter.node_->is_internal();
  if (internal_delete) {
    // Deletion of a value on an internal node. First, transfer the largest
    // value from our left child here, then erase/rebalance from that position.
    // We can get to the largest value from our left child by decrementing iter.
    iterator internal_iter(iter);
    --iter;
    assert(iter.node_->is_leaf());
    internal_iter.node_->transfer(
        static_cast<size_type>(internal_iter.position_),
        static_cast<size_type>(iter.position_), iter.node_,
        mutable_allocator());
  } else {
    // Shift values after erased position in leaf. In the internal case, we
    // don't need to do this because the leaf position is the end of the node.
    const field_type transfer_from =
        static_cast<field_type>(iter.position_ + 1);
    const field_type num_to_transfer = iter.node_->finish() - transfer_from;
    iter.node_->transfer_n(num_to_transfer,
                           static_cast<size_type>(iter.position_),
                           transfer_from, iter.node_, mutable_allocator());
  }
  // Update node finish and container size.
  iter.node_->set_finish(iter.node_->finish() - 1);
  --size_;

  // We want to return the next value after the one we just erased. If we
  // erased from an internal node (internal_delete == true), then the next
  // value is ++(++iter). If we erased from a leaf node (internal_delete ==
  // false) then the next value is ++iter. Note that ++iter may point to an
  // internal node and the value in the internal node may move to a leaf node
  // (iter.node_) when rebalancing is performed at the leaf level.

  iterator res = rebalance_after_delete(iter);

  // If we erased from an internal node, advance the iterator.
  if (internal_delete) {
    ++res;
  }
  return res;
}
   2581 
// Restores b-tree invariants after a removal at `iter` by merging or
// rebalancing underfull nodes while walking up toward the root. Returns an
// iterator pointing at the value that now follows the erased one.
template <typename P>
auto btree<P>::rebalance_after_delete(iterator iter) -> iterator {
  // Merge/rebalance as we walk back up the tree.
  iterator res(iter);
  bool first_iteration = true;
  for (;;) {
    if (iter.node_ == root()) {
      try_shrink();
      if (empty()) {
        return end();
      }
      break;
    }
    if (iter.node_->count() >= kMinNodeValues) {
      // The node is sufficiently full; invariants hold from here upward.
      break;
    }
    bool merged = try_merge_or_rebalance(&iter);
    // On the first iteration, we should update `res` with `iter` because `res`
    // may have been invalidated.
    if (first_iteration) {
      res = iter;
      first_iteration = false;
    }
    if (!merged) {
      break;
    }
    // A merge pulled a value out of the parent; continue rebalancing there.
    iter.position_ = iter.node_->position();
    iter.node_ = iter.node_->parent();
  }
  res.update_generation();

  // Adjust our return value. If we're pointing at the end of a node, advance
  // the iterator.
  if (res.position_ == res.node_->finish()) {
    res.position_ = res.node_->finish() - 1;
    ++res;
  }

  return res;
}
   2622 
   2623 // Note: we tried implementing this more efficiently by erasing all of the
   2624 // elements in [begin, end) at once and then doing rebalancing once at the end
   2625 // (rather than interleaving deletion and rebalancing), but that adds a lot of
   2626 // complexity, which seems to outweigh the performance win.
   2627 template <typename P>
   2628 auto btree<P>::erase_range(iterator begin, iterator end)
   2629    -> std::pair<size_type, iterator> {
   2630  size_type count = static_cast<size_type>(end - begin);
   2631  assert(count >= 0);
   2632 
   2633  if (count == 0) {
   2634    return {0, begin};
   2635  }
   2636 
   2637  if (static_cast<size_type>(count) == size_) {
   2638    clear();
   2639    return {count, this->end()};
   2640  }
   2641 
   2642  if (begin.node_ == end.node_) {
   2643    assert(end.position_ > begin.position_);
   2644    begin.node_->remove_values(
   2645        static_cast<field_type>(begin.position_),
   2646        static_cast<field_type>(end.position_ - begin.position_),
   2647        mutable_allocator());
   2648    size_ -= count;
   2649    return {count, rebalance_after_delete(begin)};
   2650  }
   2651 
   2652  const size_type target_size = size_ - count;
   2653  while (size_ > target_size) {
   2654    if (begin.node_->is_leaf()) {
   2655      const size_type remaining_to_erase = size_ - target_size;
   2656      const size_type remaining_in_node =
   2657          static_cast<size_type>(begin.node_->finish() - begin.position_);
   2658      const field_type to_erase = static_cast<field_type>(
   2659          (std::min)(remaining_to_erase, remaining_in_node));
   2660      begin.node_->remove_values(static_cast<field_type>(begin.position_),
   2661                                 to_erase, mutable_allocator());
   2662      size_ -= to_erase;
   2663      begin = rebalance_after_delete(begin);
   2664    } else {
   2665      begin = erase(begin);
   2666    }
   2667  }
   2668  begin.update_generation();
   2669  return {count, begin};
   2670 }
   2671 
   2672 template <typename P>
   2673 void btree<P>::clear() {
   2674  if (!empty()) {
   2675    node_type::clear_and_delete(root(), mutable_allocator());
   2676  }
   2677  mutable_root() = mutable_rightmost() = EmptyNode();
   2678  size_ = 0;
   2679 }
   2680 
// Swaps the contents of `*this` and `other`. Whether the allocators are
// exchanged is governed by propagate_on_container_swap; swapping containers
// with unequal, non-propagating allocators is undefined behavior.
template <typename P>
void btree<P>::swap(btree &other) {
  using std::swap;
  if (absl::allocator_traits<
          allocator_type>::propagate_on_container_swap::value) {
    // Note: `rightmost_` also contains the allocator and the key comparator.
    swap(rightmost_, other.rightmost_);
  } else {
    // It's undefined behavior if the allocators are unequal here.
    assert(allocator() == other.allocator());
    swap(mutable_rightmost(), other.mutable_rightmost());
    swap(*mutable_key_comp(), *other.mutable_key_comp());
  }
  swap(mutable_root(), other.mutable_root());
  swap(size_, other.size_);
}
   2697 
// Checks the tree's structural invariants. All checks are `assert`s, so this
// is a no-op when NDEBUG is defined.
template <typename P>
void btree<P>::verify() const {
  assert(root() != nullptr);
  assert(leftmost() != nullptr);
  assert(rightmost() != nullptr);
  // Per-node invariants (ordering, counts, links) are checked recursively.
  assert(empty() || size() == internal_verify(root(), nullptr, nullptr));
  // Incrementing from before-begin must land on leftmost(); decrementing from
  // the root's end must land on rightmost().
  assert(leftmost() == (++const_iterator(root(), -1)).node_);
  assert(rightmost() == (--const_iterator(root(), root()->finish())).node_);
  assert(leftmost()->is_leaf());
  assert(rightmost()->is_leaf());
}
   2709 
// Makes room for an insertion at `*iter` when its node is full. First tries
// to shift values into a non-full sibling; failing that, splits the node
// (recursively ensuring the parent has room, and growing a new root when the
// full node is the root). On return, `*iter` refers to the node/position
// where the new value should go, and that node has free space.
template <typename P>
void btree<P>::rebalance_or_split(iterator *iter) {
  node_type *&node = iter->node_;
  int &insert_position = iter->position_;
  assert(node->count() == node->max_count());
  assert(kNodeSlots == node->max_count());

  // First try to make room on the node by rebalancing.
  node_type *parent = node->parent();
  if (node != root()) {
    if (node->position() > parent->start()) {
      // Try rebalancing with our left sibling.
      node_type *left = parent->child(node->position() - 1);
      assert(left->max_count() == kNodeSlots);
      if (left->count() < kNodeSlots) {
        // We bias rebalancing based on the position being inserted. If we're
        // inserting at the end of the right node then we bias rebalancing to
        // fill up the left node.
        field_type to_move =
            (kNodeSlots - left->count()) /
            (1 + (static_cast<field_type>(insert_position) < kNodeSlots));
        to_move = (std::max)(field_type{1}, to_move);

        if (static_cast<field_type>(insert_position) - to_move >=
                node->start() ||
            left->count() + to_move < kNodeSlots) {
          left->rebalance_right_to_left(to_move, node, mutable_allocator());

          assert(node->max_count() - node->count() == to_move);
          insert_position = static_cast<int>(
              static_cast<field_type>(insert_position) - to_move);
          if (insert_position < node->start()) {
            // The insertion point moved into the left sibling.
            insert_position = insert_position + left->count() + 1;
            node = left;
          }

          assert(node->count() < node->max_count());
          return;
        }
      }
    }

    if (node->position() < parent->finish()) {
      // Try rebalancing with our right sibling.
      node_type *right = parent->child(node->position() + 1);
      assert(right->max_count() == kNodeSlots);
      if (right->count() < kNodeSlots) {
        // We bias rebalancing based on the position being inserted. If we're
        // inserting at the beginning of the left node then we bias rebalancing
        // to fill up the right node.
        field_type to_move = (kNodeSlots - right->count()) /
                             (1 + (insert_position > node->start()));
        to_move = (std::max)(field_type{1}, to_move);

        if (static_cast<field_type>(insert_position) <=
                node->finish() - to_move ||
            right->count() + to_move < kNodeSlots) {
          node->rebalance_left_to_right(to_move, right, mutable_allocator());

          if (insert_position > node->finish()) {
            // The insertion point moved into the right sibling.
            insert_position = insert_position - node->count() - 1;
            node = right;
          }

          assert(node->count() < node->max_count());
          return;
        }
      }
    }

    // Rebalancing failed, make sure there is room on the parent node for a new
    // value.
    assert(parent->max_count() == kNodeSlots);
    if (parent->count() == kNodeSlots) {
      iterator parent_iter(parent, node->position());
      rebalance_or_split(&parent_iter);
      parent = node->parent();
    }
  } else {
    // Rebalancing not possible because this is the root node.
    // Create a new root node and set the current root node as the child of the
    // new root.
    parent = new_internal_node(/*position=*/0, parent);
    parent->set_generation(root()->generation());
    parent->init_child(parent->start(), node);
    mutable_root() = parent;
    // If the former root was a leaf node, then it's now the rightmost node.
    assert(parent->start_child()->is_internal() ||
           parent->start_child() == rightmost());
  }

  // Split the node.
  node_type *split_node;
  if (node->is_leaf()) {
    split_node = new_leaf_node(node->position() + 1, parent);
    node->split(insert_position, split_node, mutable_allocator());
    if (rightmost() == node) mutable_rightmost() = split_node;
  } else {
    split_node = new_internal_node(node->position() + 1, parent);
    node->split(insert_position, split_node, mutable_allocator());
  }

  if (insert_position > node->finish()) {
    // The insertion point ended up in the newly created split node.
    insert_position = insert_position - node->count() - 1;
    node = split_node;
  }
}
   2817 
// Merges `right` into `left` (pulling down the delimiting value from their
// shared parent); `left` and `right` must be adjacent siblings. After the
// merge, `right` has been deallocated by node_type::merge.
template <typename P>
void btree<P>::merge_nodes(node_type *left, node_type *right) {
  left->merge(right, mutable_allocator());
  // If `right` was the rightmost node, `left` now is. (This compares the
  // stored pointer value only; `right` is not dereferenced here.)
  if (rightmost() == right) mutable_rightmost() = left;
}
   2823 
// After a deletion from iter->node_, tries to restore node fullness by
// merging with a sibling (returns true; *iter is re-pointed at the surviving
// node) or by moving values over from a sibling (returns false; iter's
// position is adjusted to keep denoting the same logical slot).
// NOTE(review): unconditionally reads iter->node_->parent(), so the caller
// must ensure iter->node_ is not the root (the root case is handled by
// try_shrink) — confirm against callers.
template <typename P>
bool btree<P>::try_merge_or_rebalance(iterator *iter) {
  node_type *parent = iter->node_->parent();
  if (iter->node_->position() > parent->start()) {
    // Try merging with our left sibling.
    node_type *left = parent->child(iter->node_->position() - 1);
    assert(left->max_count() == kNodeSlots);
    // The +1 accounts for the delimiting value pulled down from the parent.
    if (1U + left->count() + iter->node_->count() <= kNodeSlots) {
      iter->position_ += 1 + left->count();
      merge_nodes(left, iter->node_);
      iter->node_ = left;
      return true;
    }
  }
  if (iter->node_->position() < parent->finish()) {
    // Try merging with our right sibling.
    node_type *right = parent->child(iter->node_->position() + 1);
    assert(right->max_count() == kNodeSlots);
    if (1U + iter->node_->count() + right->count() <= kNodeSlots) {
      merge_nodes(iter->node_, right);
      return true;
    }
    // Try rebalancing with our right sibling. We don't perform rebalancing if
    // we deleted the first element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the front of the tree.
    if (right->count() > kMinNodeValues &&
        (iter->node_->count() == 0 || iter->position_ > iter->node_->start())) {
      field_type to_move = (right->count() - iter->node_->count()) / 2;
      // Never drain the sibling completely — leave it at least one value.
      to_move =
          (std::min)(to_move, static_cast<field_type>(right->count() - 1));
      iter->node_->rebalance_right_to_left(to_move, right, mutable_allocator());
      return false;
    }
  }
  if (iter->node_->position() > parent->start()) {
    // Try rebalancing with our left sibling. We don't perform rebalancing if
    // we deleted the last element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the back of the tree.
    node_type *left = parent->child(iter->node_->position() - 1);
    if (left->count() > kMinNodeValues &&
        (iter->node_->count() == 0 ||
         iter->position_ < iter->node_->finish())) {
      field_type to_move = (left->count() - iter->node_->count()) / 2;
      to_move = (std::min)(to_move, static_cast<field_type>(left->count() - 1));
      left->rebalance_left_to_right(to_move, iter->node_, mutable_allocator());
      // Values shifted right by to_move; keep iter on the same logical slot.
      iter->position_ += to_move;
      return false;
    }
  }
  return false;
}
   2877 
   2878 template <typename P>
   2879 void btree<P>::try_shrink() {
   2880  node_type *orig_root = root();
   2881  if (orig_root->count() > 0) {
   2882    return;
   2883  }
   2884  // Deleted the last item on the root node, shrink the height of the tree.
   2885  if (orig_root->is_leaf()) {
   2886    assert(size() == 0);
   2887    mutable_root() = mutable_rightmost() = EmptyNode();
   2888  } else {
   2889    node_type *child = orig_root->start_child();
   2890    child->make_root();
   2891    mutable_root() = child;
   2892  }
   2893  node_type::clear_and_delete(orig_root, mutable_allocator());
   2894 }
   2895 
   2896 template <typename P>
   2897 template <typename IterType>
   2898 inline IterType btree<P>::internal_last(IterType iter) {
   2899  assert(iter.node_ != nullptr);
   2900  while (iter.position_ == iter.node_->finish()) {
   2901    iter.position_ = iter.node_->position();
   2902    iter.node_ = iter.node_->parent();
   2903    if (iter.node_->is_leaf()) {
   2904      iter.node_ = nullptr;
   2905      break;
   2906    }
   2907  }
   2908  iter.update_generation();
   2909  return iter;
   2910 }
   2911 
// Constructs a value from `args` in the slot designated by `iter`, growing,
// splitting, or (under sanitizers) replacing nodes as needed. Returns an
// iterator to the newly inserted value.
template <typename P>
template <typename... Args>
inline auto btree<P>::internal_emplace(iterator iter, Args &&...args)
    -> iterator {
  if (iter.node_->is_internal()) {
    // We can't insert on an internal node. Instead, we'll insert after the
    // previous value which is guaranteed to be on a leaf node.
    --iter;
    ++iter.position_;
  }
  const field_type max_count = iter.node_->max_count();
  allocator_type *alloc = mutable_allocator();

  // Moves all values from old_node into new_node, copies over its
  // generation, and frees old_node.
  const auto transfer_and_delete = [&](node_type *old_node,
                                       node_type *new_node) {
    new_node->transfer_n(old_node->count(), new_node->start(),
                         old_node->start(), old_node, alloc);
    new_node->set_finish(old_node->finish());
    old_node->set_finish(old_node->start());
    new_node->set_generation(old_node->generation());
    node_type::clear_and_delete(old_node, alloc);
  };
  // Replaces the (leaf) root node with a freshly allocated node of
  // `new_node_size` slots and re-points `iter` at it.
  const auto replace_leaf_root_node = [&](field_type new_node_size) {
    assert(iter.node_ == root());
    node_type *old_root = iter.node_;
    node_type *new_root = iter.node_ = new_leaf_root_node(new_node_size);
    transfer_and_delete(old_root, new_root);
    mutable_root() = mutable_rightmost() = new_root;
  };

  bool replaced_node = false;
  if (iter.node_->count() == max_count) {
    // Make room in the leaf for the new item.
    if (max_count < kNodeSlots) {
      // Insertion into the root where the root is smaller than the full node
      // size. Simply grow the size of the root node.
      replace_leaf_root_node(static_cast<field_type>(
          (std::min)(static_cast<int>(kNodeSlots), 2 * max_count)));
      replaced_node = true;
    } else {
      rebalance_or_split(&iter);
    }
  }
  (void)replaced_node;
#if defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
    defined(ABSL_HAVE_HWADDRESS_SANITIZER)
  // Under (HW)ASan, replace the target node with a fresh allocation on every
  // insertion (unless it was already replaced above) so that stale pointers
  // or iterators into the old node are reported by the sanitizer.
  if (!replaced_node) {
    assert(iter.node_->is_leaf());
    if (iter.node_->is_root()) {
      replace_leaf_root_node(max_count);
    } else {
      node_type *old_node = iter.node_;
      const bool was_rightmost = rightmost() == old_node;
      const bool was_leftmost = leftmost() == old_node;
      node_type *parent = old_node->parent();
      const field_type position = old_node->position();
      node_type *new_node = iter.node_ = new_leaf_node(position, parent);
      parent->set_child_noupdate_position(position, new_node);
      transfer_and_delete(old_node, new_node);
      if (was_rightmost) mutable_rightmost() = new_node;
      // The leftmost node is stored as the parent of the root node.
      if (was_leftmost) root()->set_parent(new_node);
    }
  }
#endif
  iter.node_->emplace_value(static_cast<field_type>(iter.position_), alloc,
                            std::forward<Args>(args)...);
  assert(
      iter.node_->is_ordered_correctly(static_cast<field_type>(iter.position_),
                                       original_key_compare(key_comp())) &&
      "If this assert fails, then either (1) the comparator may violate "
      "transitivity, i.e. comp(a,b) && comp(b,c) -> comp(a,c) (see "
      "https://en.cppreference.com/w/cpp/named_req/Compare), or (2) a "
      "key may have been mutated after it was inserted into the tree.");
  ++size_;
  iter.update_generation();
  return iter;
}
   2990 
   2991 template <typename P>
   2992 template <typename K>
   2993 inline auto btree<P>::internal_locate(const K &key) const
   2994    -> SearchResult<iterator, is_key_compare_to::value> {
   2995  iterator iter(const_cast<node_type *>(root()));
   2996  for (;;) {
   2997    SearchResult<size_type, is_key_compare_to::value> res =
   2998        iter.node_->lower_bound(key, key_comp());
   2999    iter.position_ = static_cast<int>(res.value);
   3000    if (res.IsEq()) {
   3001      return {iter, MatchKind::kEq};
   3002    }
   3003    // Note: in the non-key-compare-to case, we don't need to walk all the way
   3004    // down the tree if the keys are equal, but determining equality would
   3005    // require doing an extra comparison on each node on the way down, and we
   3006    // will need to go all the way to the leaf node in the expected case.
   3007    if (iter.node_->is_leaf()) {
   3008      break;
   3009    }
   3010    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
   3011  }
   3012  // Note: in the non-key-compare-to case, the key may actually be equivalent
   3013  // here (and the MatchKind::kNe is ignored).
   3014  return {iter, MatchKind::kNe};
   3015 }
   3016 
// Returns an iterator to the first element not less than `key`, together
// with whether an equivalent key was seen (in the key-compare-to case).
template <typename P>
template <typename K>
auto btree<P>::internal_lower_bound(const K &key) const
    -> SearchResult<iterator, is_key_compare_to::value> {
  // When at most one equivalent key can exist, internal_locate suffices; we
  // only need internal_last to skip past-the-end positions.
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
    SearchResult<iterator, is_key_compare_to::value> ret = internal_locate(key);
    ret.value = internal_last(ret.value);
    return ret;
  }
  // With duplicates possible, we must descend all the way to a leaf to find
  // the *first* equivalent key, remembering (seen_eq) whether any node on the
  // way down reported an equal match so the result still says kEq.
  iterator iter(const_cast<node_type *>(root()));
  SearchResult<size_type, is_key_compare_to::value> res;
  bool seen_eq = false;
  for (;;) {
    res = iter.node_->lower_bound(key, key_comp());
    iter.position_ = static_cast<int>(res.value);
    if (iter.node_->is_leaf()) {
      break;
    }
    seen_eq = seen_eq || res.IsEq();
    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
  }
  if (res.IsEq()) return {iter, MatchKind::kEq};
  // The leaf position may be past-the-end; internal_last walks up to the
  // next valid position (or end()).
  return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
}
   3041 
   3042 template <typename P>
   3043 template <typename K>
   3044 auto btree<P>::internal_upper_bound(const K &key) const -> iterator {
   3045  iterator iter(const_cast<node_type *>(root()));
   3046  for (;;) {
   3047    iter.position_ = static_cast<int>(iter.node_->upper_bound(key, key_comp()));
   3048    if (iter.node_->is_leaf()) {
   3049      break;
   3050    }
   3051    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
   3052  }
   3053  return internal_last(iter);
   3054 }
   3055 
   3056 template <typename P>
   3057 template <typename K>
   3058 auto btree<P>::internal_find(const K &key) const -> iterator {
   3059  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
   3060  if (res.HasMatch()) {
   3061    if (res.IsEq()) {
   3062      return res.value;
   3063    }
   3064  } else {
   3065    const iterator iter = internal_last(res.value);
   3066    if (iter.node_ != nullptr && !compare_keys(key, iter.key())) {
   3067      return iter;
   3068    }
   3069  }
   3070  return {nullptr, 0};
   3071 }
   3072 
// Debug helper: asserts the ordering and structural invariants of the
// subtree rooted at `node`, where `lo`/`hi` (either may be null) bound the
// keys permitted in this subtree. Returns the number of values stored in the
// subtree so the caller can cross-check against size().
template <typename P>
typename btree<P>::size_type btree<P>::internal_verify(
    const node_type *node, const key_type *lo, const key_type *hi) const {
  assert(node->count() > 0);
  assert(node->count() <= node->max_count());
  if (lo) {
    // First key must not be less than the lower bound.
    assert(!compare_keys(node->key(node->start()), *lo));
  }
  if (hi) {
    // Upper bound must not be less than the last key.
    assert(!compare_keys(*hi, node->key(node->finish() - 1)));
  }
  // Keys within the node must be in non-decreasing order.
  for (int i = node->start() + 1; i < node->finish(); ++i) {
    assert(!compare_keys(node->key(i), node->key(i - 1)));
  }
  size_type count = node->count();
  if (node->is_internal()) {
    // Recurse into each child, narrowing the key bounds to the keys that
    // delimit that child within this node.
    for (field_type i = node->start(); i <= node->finish(); ++i) {
      assert(node->child(i) != nullptr);
      assert(node->child(i)->parent() == node);
      assert(node->child(i)->position() == i);
      count += internal_verify(node->child(i),
                               i == node->start() ? lo : &node->key(i - 1),
                               i == node->finish() ? hi : &node->key(i));
    }
  }
  return count;
}
   3100 
// Grants free functions (e.g. absl::erase_if for btree containers) access to
// the container's private tree_ member and iterator internals.
struct btree_access {
  // Erases every element satisfying `pred`; returns the number erased.
  // Erasures within a single leaf node are batched: matching values are
  // destroyed and survivors compacted in place, with a single rebalance at
  // the end, instead of erasing one element at a time.
  template <typename BtreeContainer, typename Pred>
  static auto erase_if(BtreeContainer &container, Pred pred) ->
      typename BtreeContainer::size_type {
    const auto initial_size = container.size();
    auto &tree = container.tree_;
    auto *alloc = tree.mutable_allocator();
    for (auto it = container.begin(); it != container.end();) {
      if (!pred(*it)) {
        ++it;
        continue;
      }
      auto *node = it.node_;
      if (node->is_internal()) {
        // Handle internal nodes normally.
        it = container.erase(it);
        continue;
      }
      // If this is a leaf node, then we do all the erases from this node
      // at once before doing rebalancing.

      // The current position to transfer slots to.
      int to_pos = it.position_;
      node->value_destroy(it.position_, alloc);
      while (++it.position_ < node->finish()) {
        // Keep the iterator's generation current so dereferencing *it below
        // passes the invalidation checks in generation-tracking builds.
        it.update_generation();
        if (pred(*it)) {
          node->value_destroy(it.position_, alloc);
        } else {
          // Survivor: slide it left over the erased slots.
          node->transfer(node->slot(to_pos++), node->slot(it.position_), alloc);
        }
      }
      const int num_deleted = node->finish() - to_pos;
      tree.size_ -= num_deleted;
      node->set_finish(to_pos);
      it.position_ = to_pos;
      // The leaf may now be underfull (or empty); rebalance once.
      it = tree.rebalance_after_delete(it);
    }
    return initial_size - container.size();
  }
};
   3142 
   3143 #undef ABSL_BTREE_ENABLE_GENERATIONS
   3144 
   3145 }  // namespace container_internal
   3146 ABSL_NAMESPACE_END
   3147 }  // namespace absl
   3148 
   3149 #endif  // ABSL_CONTAINER_INTERNAL_BTREE_H_