1 // Internal policy header for unordered_set and unordered_map -*- C++ -*-
3 // Copyright (C) 2010-2021 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file bits/hashtable_policy.h
26 * This is an internal header file, included by other library headers.
27 * Do not attempt to use it directly.
28 * @headername{unordered_map,unordered_set}
31 #ifndef _HASHTABLE_POLICY_H
32 #define _HASHTABLE_POLICY_H 1
34 #include <tuple> // for std::tuple, std::forward_as_tuple
35 #include <bits/stl_algobase.h> // for std::min, std::is_permutation.
36 #include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
38 namespace std
_GLIBCXX_VISIBILITY(default)
40 _GLIBCXX_BEGIN_NAMESPACE_VERSION
// Forward declaration of the main hashtable class template; the policy
// classes defined below are its building blocks.
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits>
  class _Hashtable;
/**
 *  @defgroup hashtable-detail Base and Implementation Classes
 *  @ingroup unordered_associative_containers
 *  @{
 */
// Forward declaration: named by the policy base classes below and
// defined later in this header.
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Equal, typename _Hash, typename _RangeHash,
         typename _Unused, typename _Traits>
  struct _Hashtable_base;
// Helper function: return distance(first, last) for forward
// iterators, or 0/1 for input iterators.  Input iterators cannot be
// traversed twice, so for them we only report whether the range is
// non-empty; callers use the result as a sizing hint only.
template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __first, _Iterator __last,
                std::input_iterator_tag)
  { return __first != __last ? 1 : 0; }

template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __first, _Iterator __last,
                std::forward_iterator_tag)
  { return std::distance(__first, __last); }

template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __first, _Iterator __last)
  {
    // Tag-dispatch on the iterator category, obtained portably from
    // iterator_traits instead of an internal helper.
    return __distance_fw(__first, __last,
                         typename std::iterator_traits<_Iterator>::
                         iterator_category{});
  }
// Identity key-extraction functor: unordered_set/multiset store the key
// itself, so the "key part" of a value is the whole value.  Perfectly
// forwards its argument without copying.
struct _Identity
{
  template<typename _Tp>
    _Tp&&
    operator()(_Tp&& __x) const noexcept
    { return std::forward<_Tp>(__x); }
};
// Key-extraction functor for unordered_map/multimap: the key is the
// first element of the stored pair.  Works on anything std::get<0>
// accepts (std::pair, std::tuple), preserving value category and
// returning a reference into the argument.
struct _Select1st
{
  template<typename _Tp>
    auto
    operator()(_Tp&& __x) const noexcept
    -> decltype(std::get<0>(std::forward<_Tp>(__x)))
    { return std::get<0>(std::forward<_Tp>(__x)); }
};
// Forward declaration; defined later in this header.  Manages
// allocation and deallocation of hashtable nodes and bucket arrays.
template<typename _NodeAlloc>
  struct _Hashtable_alloc;
100 // Functor recycling a pool of nodes and using allocation once the pool is
102 template<typename _NodeAlloc
>
103 struct _ReuseOrAllocNode
106 using __node_alloc_type
= _NodeAlloc
;
107 using __hashtable_alloc
= _Hashtable_alloc
<__node_alloc_type
>;
108 using __node_alloc_traits
=
109 typename
__hashtable_alloc::__node_alloc_traits
;
110 using __node_type
= typename
__hashtable_alloc::__node_type
;
113 _ReuseOrAllocNode(__node_type
* __nodes
, __hashtable_alloc
& __h
)
114 : _M_nodes(__nodes
), _M_h(__h
) { }
115 _ReuseOrAllocNode(const _ReuseOrAllocNode
&) = delete;
118 { _M_h
._M_deallocate_nodes(_M_nodes
); }
120 template<typename _Arg
>
122 operator()(_Arg
&& __arg
) const
126 __node_type
* __node
= _M_nodes
;
127 _M_nodes
= _M_nodes
->_M_next();
128 __node
->_M_nxt
= nullptr;
129 auto& __a
= _M_h
._M_node_allocator();
130 __node_alloc_traits::destroy(__a
, __node
->_M_valptr());
133 __node_alloc_traits::construct(__a
, __node
->_M_valptr(),
134 std::forward
<_Arg
>(__arg
));
138 _M_h
._M_deallocate_node_ptr(__node
);
139 __throw_exception_again
;
143 return _M_h
._M_allocate_node(std::forward
<_Arg
>(__arg
));
147 mutable __node_type
* _M_nodes
;
148 __hashtable_alloc
& _M_h
;
151 // Functor similar to the previous one but without any pool of nodes to
153 template<typename _NodeAlloc
>
157 using __hashtable_alloc
= _Hashtable_alloc
<_NodeAlloc
>;
158 using __node_type
= typename
__hashtable_alloc::__node_type
;
161 _AllocNode(__hashtable_alloc
& __h
)
164 template<typename _Arg
>
166 operator()(_Arg
&& __arg
) const
167 { return _M_h
._M_allocate_node(std::forward
<_Arg
>(__arg
)); }
170 __hashtable_alloc
& _M_h
;
// Auxiliary types used for all instantiations of _Hashtable nodes
// and iterators.

/**
 *  struct _Hashtable_traits
 *
 *  Important traits for hash tables.
 *
 *  @tparam _Cache_hash_code  Boolean value. True if the value of
 *  the hash function is stored along with the value. This is a
 *  time-space tradeoff.  Storing it may improve lookup speed by
 *  reducing the number of times we need to call the _Hash or _Equal
 *  functors.
 *
 *  @tparam _Constant_iterators  Boolean value. True if iterator and
 *  const_iterator are both constant iterator types. This is true
 *  for unordered_set and unordered_multiset, false for
 *  unordered_map and unordered_multimap.
 *
 *  @tparam _Unique_keys  Boolean value. True if the return value
 *  of _Hashtable::count(k) is always at most one, false if it may
 *  be an arbitrary number. This is true for unordered_set and
 *  unordered_map, false for unordered_multiset and
 *  unordered_multimap.
 */
template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
  struct _Hashtable_traits
  {
    // std::integral_constant spellings are identical to the internal
    // __bool_constant alias and portable to a standalone compile.
    using __hash_cached = std::integral_constant<bool, _Cache_hash_code>;
    using __constant_iterators
      = std::integral_constant<bool, _Constant_iterators>;
    using __unique_keys = std::integral_constant<bool, _Unique_keys>;
  };
/**
 *  struct _Hash_node_base
 *
 *  Nodes, used to wrap elements stored in the hash table.  A policy
 *  template parameter of class template _Hashtable controls whether
 *  nodes also store a hash code. In some cases (e.g. strings) this
 *  may be a performance win.
 */
struct _Hash_node_base
{
  // Next node in the singly-linked list of elements.
  _Hash_node_base* _M_nxt;

  // Value-initializes _M_nxt to null.
  _Hash_node_base() noexcept : _M_nxt() { }

  _Hash_node_base(_Hash_node_base* __next) noexcept
  : _M_nxt(__next) { }
};
/**
 *  struct _Hash_node_value_base
 *
 *  Node type with the value to store.  The value lives in raw aligned
 *  storage and is constructed/destroyed explicitly by the allocator
 *  machinery, never by this struct itself.
 */
template<typename _Value>
  struct _Hash_node_value_base
  {
    typedef _Value value_type;

    __gnu_cxx::__aligned_buffer<_Value> _M_storage;

    _Value*
    _M_valptr() noexcept
    { return _M_storage._M_ptr(); }

    const _Value*
    _M_valptr() const noexcept
    { return _M_storage._M_ptr(); }

    _Value&
    _M_v() noexcept
    { return *_M_valptr(); }

    const _Value&
    _M_v() const noexcept
    { return *_M_valptr(); }
  };
/**
 *  Primary template struct _Hash_node_code_cache: empty when the hash
 *  code is not cached in the node.
 */
template<bool _Cache_hash_code>
  struct _Hash_node_code_cache
  { };

/**
 *  Specialization for node with cache, struct _Hash_node_code_cache:
 *  stores the element's hash code so rehashing and lookup can avoid
 *  recomputing it.
 */
template<>
  struct _Hash_node_code_cache<true>
  { std::size_t _M_hash_code; };
266 template<typename _Value
, bool _Cache_hash_code
>
267 struct _Hash_node_value
268 : _Hash_node_value_base
<_Value
>
269 , _Hash_node_code_cache
<_Cache_hash_code
>
273 * Primary template struct _Hash_node.
275 template<typename _Value
, bool _Cache_hash_code
>
278 , _Hash_node_value
<_Value
, _Cache_hash_code
>
281 _M_next() const noexcept
282 { return static_cast<_Hash_node
*>(this->_M_nxt
); }
285 /// Base class for node iterators.
286 template<typename _Value
, bool _Cache_hash_code
>
287 struct _Node_iterator_base
289 using __node_type
= _Hash_node
<_Value
, _Cache_hash_code
>;
293 _Node_iterator_base() = default;
294 _Node_iterator_base(__node_type
* __p
) noexcept
299 { _M_cur
= _M_cur
->_M_next(); }
302 operator==(const _Node_iterator_base
& __x
, const _Node_iterator_base
& __y
)
304 { return __x
._M_cur
== __y
._M_cur
; }
306 #if __cpp_impl_three_way_comparison < 201907L
308 operator!=(const _Node_iterator_base
& __x
, const _Node_iterator_base
& __y
)
310 { return __x
._M_cur
!= __y
._M_cur
; }
314 /// Node iterators, used to iterate through all the hashtable.
315 template<typename _Value
, bool __constant_iterators
, bool __cache
>
316 struct _Node_iterator
317 : public _Node_iterator_base
<_Value
, __cache
>
320 using __base_type
= _Node_iterator_base
<_Value
, __cache
>;
321 using __node_type
= typename
__base_type::__node_type
;
324 typedef _Value value_type
;
325 typedef std::ptrdiff_t difference_type
;
326 typedef std::forward_iterator_tag iterator_category
;
328 using pointer
= typename
std::conditional
<__constant_iterators
,
329 const value_type
*, value_type
*>::type
;
331 using reference
= typename
std::conditional
<__constant_iterators
,
332 const value_type
&, value_type
&>::type
;
334 _Node_iterator() noexcept
335 : __base_type(nullptr) { }
338 _Node_iterator(__node_type
* __p
) noexcept
339 : __base_type(__p
) { }
342 operator*() const noexcept
343 { return this->_M_cur
->_M_v(); }
346 operator->() const noexcept
347 { return this->_M_cur
->_M_valptr(); }
350 operator++() noexcept
357 operator++(int) noexcept
359 _Node_iterator
__tmp(*this);
365 /// Node const_iterators, used to iterate through all the hashtable.
366 template<typename _Value
, bool __constant_iterators
, bool __cache
>
367 struct _Node_const_iterator
368 : public _Node_iterator_base
<_Value
, __cache
>
371 using __base_type
= _Node_iterator_base
<_Value
, __cache
>;
372 using __node_type
= typename
__base_type::__node_type
;
375 typedef _Value value_type
;
376 typedef std::ptrdiff_t difference_type
;
377 typedef std::forward_iterator_tag iterator_category
;
379 typedef const value_type
* pointer
;
380 typedef const value_type
& reference
;
382 _Node_const_iterator() noexcept
383 : __base_type(nullptr) { }
386 _Node_const_iterator(__node_type
* __p
) noexcept
387 : __base_type(__p
) { }
389 _Node_const_iterator(const _Node_iterator
<_Value
, __constant_iterators
,
390 __cache
>& __x
) noexcept
391 : __base_type(__x
._M_cur
) { }
394 operator*() const noexcept
395 { return this->_M_cur
->_M_v(); }
398 operator->() const noexcept
399 { return this->_M_cur
->_M_valptr(); }
401 _Node_const_iterator
&
402 operator++() noexcept
409 operator++(int) noexcept
411 _Node_const_iterator
__tmp(*this);
// Many of class template _Hashtable's template parameters are policy
// classes.  These are defaults for the policies.

/// Default range hashing function: use division to fold a large number
/// into the range [0, N).
struct _Mod_range_hashing
{
  typedef std::size_t first_argument_type;
  typedef std::size_t second_argument_type;
  typedef std::size_t result_type;

  result_type
  operator()(first_argument_type __num,
             second_argument_type __den) const noexcept
  { return __num % __den; }
};
/// Default ranged hash function H.  In principle it should be a
/// function object composed from objects of type H1 and H2 such that
/// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
/// h1 and h2.  So instead we'll just use a tag to tell class template
/// hashtable to do that composition.
struct _Default_ranged_hash { };
/// Default value for rehash policy.  Bucket size is (usually) the
/// smallest prime that keeps the load factor small enough.
struct _Prime_rehash_policy
{
  using __has_load_factor = std::true_type;

  _Prime_rehash_policy(float __z = 1.0) noexcept
  : _M_max_load_factor(__z), _M_next_resize(0) { }

  float
  max_load_factor() const noexcept
  { return _M_max_load_factor; }

  // Return a bucket size no smaller than n.  Defined out of line (it
  // consults a table of primes in the compiled library).
  std::size_t
  _M_next_bkt(std::size_t __n) const;

  // Return a bucket count appropriate for n elements
  std::size_t
  _M_bkt_for_elements(std::size_t __n) const
  { return __builtin_ceil(__n / (double)_M_max_load_factor); }

  // __n_bkt is current bucket count, __n_elt is current element count,
  // and __n_ins is number of elements to be inserted.  Do we need to
  // increase bucket count?  If so, return make_pair(true, n), where n
  // is the new bucket count.  If not, return make_pair(false, 0).
  std::pair<bool, std::size_t>
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const;

  typedef std::size_t _State;

  _State
  _M_state() const
  { return _M_next_resize; }

  void
  _M_reset() noexcept
  { _M_next_resize = 0; }

  void
  _M_reset(_State __state)
  { _M_next_resize = __state; }

  static const std::size_t _S_growth_factor = 2;

  float _M_max_load_factor;
  // Element count that triggers the next rehash; mutable because it is
  // updated from the const _M_next_bkt/_M_need_rehash members.
  mutable std::size_t _M_next_resize;
};
/// Range hashing function assuming that second arg is a power of 2.
struct _Mask_range_hashing
{
  typedef std::size_t first_argument_type;
  typedef std::size_t second_argument_type;
  typedef std::size_t result_type;

  result_type
  operator()(first_argument_type __num,
             second_argument_type __den) const noexcept
  { return __num & (__den - 1); }  // __den is a power of 2, so this is __num % __den
};
/// Compute closest power of 2 not less than __n
inline std::size_t
__clp2(std::size_t __n) noexcept
{
  using __gnu_cxx::__int_traits;
  // Equivalent to return __n ? std::bit_ceil(__n) : 0;
  if (__n < 2)
    return __n;
  const unsigned __lz = sizeof(size_t) > sizeof(long)
    ? __builtin_clzll(__n - 1ull)
    : __builtin_clzl(__n - 1ul);
  // Doing two shifts avoids undefined behaviour when __lz == 0.
  return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
}

/// Rehash policy providing power of 2 bucket numbers.  Avoids modulo
/// operations.
struct _Power2_rehash_policy
{
  using __has_load_factor = std::true_type;

  _Power2_rehash_policy(float __z = 1.0) noexcept
  : _M_max_load_factor(__z), _M_next_resize(0) { }

  float
  max_load_factor() const noexcept
  { return _M_max_load_factor; }

  // Return a bucket size no smaller than n (as long as n is not above the
  // highest power of 2).
  std::size_t
  _M_next_bkt(std::size_t __n) noexcept
  {
    if (__n == 0)
      // Special case on container 1st initialization with 0 bucket count
      // hint.  We keep _M_next_resize to 0 to make sure that next time we
      // want to add an element allocation will take place.
      return 1;

    const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
    const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
    std::size_t __res = __clp2(__n);

    if (__res == 0)
      __res = __max_bkt;
    else if (__res == 1)
      // If __res is 1 we force it to 2 to make sure there will be an
      // allocation so that nothing need to be stored in the initial
      // single bucket.
      __res = 2;

    if (__res == __max_bkt)
      // Set next resize to the max value so that we never try to rehash again
      // as we already reach the biggest possible bucket number.
      // Note that it might result in max_load_factor not being respected.
      _M_next_resize = size_t(-1);
    else
      _M_next_resize
        = __builtin_floor(__res * (double)_M_max_load_factor);

    return __res;
  }

  // Return a bucket count appropriate for n elements
  std::size_t
  _M_bkt_for_elements(std::size_t __n) const noexcept
  { return __builtin_ceil(__n / (double)_M_max_load_factor); }

  // __n_bkt is current bucket count, __n_elt is current element count,
  // and __n_ins is number of elements to be inserted.  Do we need to
  // increase bucket count?  If so, return make_pair(true, n), where n
  // is the new bucket count.  If not, return make_pair(false, 0).
  std::pair<bool, std::size_t>
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) noexcept
  {
    if (__n_elt + __n_ins > _M_next_resize)
      {
        // If _M_next_resize is 0 it means that we have nothing allocated so
        // far and that we start inserting elements.  In this case we start
        // with an initial bucket size of 11.
        double __min_bkts
          = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
          / (double)_M_max_load_factor;
        if (__min_bkts >= __n_bkt)
          return { true,
            _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
                                              __n_bkt * _S_growth_factor)) };

        _M_next_resize
          = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
        return { false, 0 };
      }
    else
      return { false, 0 };
  }

  typedef std::size_t _State;

  _State
  _M_state() const noexcept
  { return _M_next_resize; }

  void
  _M_reset() noexcept
  { _M_next_resize = 0; }

  void
  _M_reset(_State __state) noexcept
  { _M_next_resize = __state; }

  static const std::size_t _S_growth_factor = 2;

  float _M_max_load_factor;
  std::size_t _M_next_resize;
};
621 // Base classes for std::_Hashtable. We define these base classes
622 // because in some cases we want to do different things depending on
623 // the value of a policy class. In some cases the policy class
624 // affects which member functions and nested typedefs are defined;
625 // we handle that by specializing base class templates. Several of
626 // the base class templates need to access other members of class
627 // template _Hashtable, so we use a variant of the "Curiously
628 // Recurring Template Pattern" (CRTP) technique.
/**
 *  Primary class template _Map_base.
 *
 *  If the hashtable has a value type of the form pair<T1, T2> and a
 *  key extraction policy (_ExtractKey) that returns the first part
 *  of the pair, the hashtable gets a mapped_type typedef.  If it
 *  satisfies those criteria and also has unique keys, then it also
 *  gets an operator[].
 */
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits,
         bool _Unique_keys = _Traits::__unique_keys::value>
  struct _Map_base { };
646 /// Partial specialization, __unique_keys set to false.
647 template<typename _Key
, typename _Pair
, typename _Alloc
, typename _Equal
,
648 typename _Hash
, typename _RangeHash
, typename _Unused
,
649 typename _RehashPolicy
, typename _Traits
>
650 struct _Map_base
<_Key
, _Pair
, _Alloc
, _Select1st
, _Equal
,
651 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
, false>
653 using mapped_type
= typename
std::tuple_element
<1, _Pair
>::type
;
656 /// Partial specialization, __unique_keys set to true.
657 template<typename _Key
, typename _Pair
, typename _Alloc
, typename _Equal
,
658 typename _Hash
, typename _RangeHash
, typename _Unused
,
659 typename _RehashPolicy
, typename _Traits
>
660 struct _Map_base
<_Key
, _Pair
, _Alloc
, _Select1st
, _Equal
,
661 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
, true>
664 using __hashtable_base
= _Hashtable_base
<_Key
, _Pair
, _Select1st
, _Equal
,
665 _Hash
, _RangeHash
, _Unused
,
668 using __hashtable
= _Hashtable
<_Key
, _Pair
, _Alloc
, _Select1st
, _Equal
,
670 _Unused
, _RehashPolicy
, _Traits
>;
672 using __hash_code
= typename
__hashtable_base::__hash_code
;
675 using key_type
= typename
__hashtable_base::key_type
;
676 using mapped_type
= typename
std::tuple_element
<1, _Pair
>::type
;
679 operator[](const key_type
& __k
);
682 operator[](key_type
&& __k
);
684 // _GLIBCXX_RESOLVE_LIB_DEFECTS
685 // DR 761. unordered_map needs an at() member function.
687 at(const key_type
& __k
);
690 at(const key_type
& __k
) const;
693 template<typename _Key
, typename _Pair
, typename _Alloc
, typename _Equal
,
694 typename _Hash
, typename _RangeHash
, typename _Unused
,
695 typename _RehashPolicy
, typename _Traits
>
697 _Map_base
<_Key
, _Pair
, _Alloc
, _Select1st
, _Equal
,
698 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
, true>::
699 operator[](const key_type
& __k
)
702 __hashtable
* __h
= static_cast<__hashtable
*>(this);
703 __hash_code __code
= __h
->_M_hash_code(__k
);
704 std::size_t __bkt
= __h
->_M_bucket_index(__code
);
705 if (auto __node
= __h
->_M_find_node(__bkt
, __k
, __code
))
706 return __node
->_M_v().second
;
708 typename
__hashtable::_Scoped_node __node
{
710 std::piecewise_construct
,
711 std::tuple
<const key_type
&>(__k
),
715 = __h
->_M_insert_unique_node(__bkt
, __code
, __node
._M_node
);
716 __node
._M_node
= nullptr;
717 return __pos
->second
;
720 template<typename _Key
, typename _Pair
, typename _Alloc
, typename _Equal
,
721 typename _Hash
, typename _RangeHash
, typename _Unused
,
722 typename _RehashPolicy
, typename _Traits
>
724 _Map_base
<_Key
, _Pair
, _Alloc
, _Select1st
, _Equal
,
725 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
, true>::
726 operator[](key_type
&& __k
)
729 __hashtable
* __h
= static_cast<__hashtable
*>(this);
730 __hash_code __code
= __h
->_M_hash_code(__k
);
731 std::size_t __bkt
= __h
->_M_bucket_index(__code
);
732 if (auto __node
= __h
->_M_find_node(__bkt
, __k
, __code
))
733 return __node
->_M_v().second
;
735 typename
__hashtable::_Scoped_node __node
{
737 std::piecewise_construct
,
738 std::forward_as_tuple(std::move(__k
)),
742 = __h
->_M_insert_unique_node(__bkt
, __code
, __node
._M_node
);
743 __node
._M_node
= nullptr;
744 return __pos
->second
;
747 template<typename _Key
, typename _Pair
, typename _Alloc
, typename _Equal
,
748 typename _Hash
, typename _RangeHash
, typename _Unused
,
749 typename _RehashPolicy
, typename _Traits
>
751 _Map_base
<_Key
, _Pair
, _Alloc
, _Select1st
, _Equal
,
752 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
, true>::
753 at(const key_type
& __k
)
756 __hashtable
* __h
= static_cast<__hashtable
*>(this);
757 auto __ite
= __h
->find(__k
);
760 __throw_out_of_range(__N("_Map_base::at"));
761 return __ite
->second
;
764 template<typename _Key
, typename _Pair
, typename _Alloc
, typename _Equal
,
765 typename _Hash
, typename _RangeHash
, typename _Unused
,
766 typename _RehashPolicy
, typename _Traits
>
768 _Map_base
<_Key
, _Pair
, _Alloc
, _Select1st
, _Equal
,
769 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
, true>::
770 at(const key_type
& __k
) const
771 -> const mapped_type
&
773 const __hashtable
* __h
= static_cast<const __hashtable
*>(this);
774 auto __ite
= __h
->find(__k
);
777 __throw_out_of_range(__N("_Map_base::at"));
778 return __ite
->second
;
782 * Primary class template _Insert_base.
784 * Defines @c insert member functions appropriate to all _Hashtables.
786 template<typename _Key
, typename _Value
, typename _Alloc
,
787 typename _ExtractKey
, typename _Equal
,
788 typename _Hash
, typename _RangeHash
, typename _Unused
,
789 typename _RehashPolicy
, typename _Traits
>
793 using __hashtable_base
= _Hashtable_base
<_Key
, _Value
, _ExtractKey
,
794 _Equal
, _Hash
, _RangeHash
,
797 using __hashtable
= _Hashtable
<_Key
, _Value
, _Alloc
, _ExtractKey
, _Equal
,
799 _Unused
, _RehashPolicy
, _Traits
>;
801 using __hash_cached
= typename
_Traits::__hash_cached
;
802 using __constant_iterators
= typename
_Traits::__constant_iterators
;
804 using __hashtable_alloc
= _Hashtable_alloc
<
805 __alloc_rebind
<_Alloc
, _Hash_node
<_Value
,
806 __hash_cached::value
>>>;
808 using value_type
= typename
__hashtable_base::value_type
;
809 using size_type
= typename
__hashtable_base::size_type
;
811 using __unique_keys
= typename
_Traits::__unique_keys
;
812 using __node_alloc_type
= typename
__hashtable_alloc::__node_alloc_type
;
813 using __node_gen_type
= _AllocNode
<__node_alloc_type
>;
816 _M_conjure_hashtable()
817 { return *(static_cast<__hashtable
*>(this)); }
819 template<typename _InputIterator
, typename _NodeGetter
>
821 _M_insert_range(_InputIterator __first
, _InputIterator __last
,
822 const _NodeGetter
&, true_type __uks
);
824 template<typename _InputIterator
, typename _NodeGetter
>
826 _M_insert_range(_InputIterator __first
, _InputIterator __last
,
827 const _NodeGetter
&, false_type __uks
);
830 using iterator
= _Node_iterator
<_Value
, __constant_iterators::value
,
831 __hash_cached::value
>;
833 using const_iterator
= _Node_const_iterator
<_Value
, __constant_iterators::value
,
834 __hash_cached::value
>;
836 using __ireturn_type
= typename
std::conditional
<__unique_keys::value
,
837 std::pair
<iterator
, bool>,
841 insert(const value_type
& __v
)
843 __hashtable
& __h
= _M_conjure_hashtable();
844 __node_gen_type
__node_gen(__h
);
845 return __h
._M_insert(__v
, __node_gen
, __unique_keys
{});
849 insert(const_iterator __hint
, const value_type
& __v
)
851 __hashtable
& __h
= _M_conjure_hashtable();
852 __node_gen_type
__node_gen(__h
);
853 return __h
._M_insert(__hint
, __v
, __node_gen
, __unique_keys
{});
856 template<typename _KType
, typename
... _Args
>
857 std::pair
<iterator
, bool>
858 try_emplace(const_iterator
, _KType
&& __k
, _Args
&&... __args
)
860 __hashtable
& __h
= _M_conjure_hashtable();
861 auto __code
= __h
._M_hash_code(__k
);
862 std::size_t __bkt
= __h
._M_bucket_index(__code
);
863 if (auto __node
= __h
._M_find_node(__bkt
, __k
, __code
))
864 return { iterator(__node
), false };
866 typename
__hashtable::_Scoped_node __node
{
868 std::piecewise_construct
,
869 std::forward_as_tuple(std::forward
<_KType
>(__k
)),
870 std::forward_as_tuple(std::forward
<_Args
>(__args
)...)
873 = __h
._M_insert_unique_node(__bkt
, __code
, __node
._M_node
);
874 __node
._M_node
= nullptr;
875 return { __it
, true };
879 insert(initializer_list
<value_type
> __l
)
880 { this->insert(__l
.begin(), __l
.end()); }
882 template<typename _InputIterator
>
884 insert(_InputIterator __first
, _InputIterator __last
)
886 __hashtable
& __h
= _M_conjure_hashtable();
887 __node_gen_type
__node_gen(__h
);
888 return _M_insert_range(__first
, __last
, __node_gen
, __unique_keys
{});
892 template<typename _Key
, typename _Value
, typename _Alloc
,
893 typename _ExtractKey
, typename _Equal
,
894 typename _Hash
, typename _RangeHash
, typename _Unused
,
895 typename _RehashPolicy
, typename _Traits
>
896 template<typename _InputIterator
, typename _NodeGetter
>
898 _Insert_base
<_Key
, _Value
, _Alloc
, _ExtractKey
, _Equal
,
899 _Hash
, _RangeHash
, _Unused
,
900 _RehashPolicy
, _Traits
>::
901 _M_insert_range(_InputIterator __first
, _InputIterator __last
,
902 const _NodeGetter
& __node_gen
, true_type __uks
)
904 __hashtable
& __h
= _M_conjure_hashtable();
905 for (; __first
!= __last
; ++__first
)
906 __h
._M_insert(*__first
, __node_gen
, __uks
);
909 template<typename _Key
, typename _Value
, typename _Alloc
,
910 typename _ExtractKey
, typename _Equal
,
911 typename _Hash
, typename _RangeHash
, typename _Unused
,
912 typename _RehashPolicy
, typename _Traits
>
913 template<typename _InputIterator
, typename _NodeGetter
>
915 _Insert_base
<_Key
, _Value
, _Alloc
, _ExtractKey
, _Equal
,
916 _Hash
, _RangeHash
, _Unused
,
917 _RehashPolicy
, _Traits
>::
918 _M_insert_range(_InputIterator __first
, _InputIterator __last
,
919 const _NodeGetter
& __node_gen
, false_type __uks
)
921 using __rehash_type
= typename
__hashtable::__rehash_type
;
922 using __rehash_state
= typename
__hashtable::__rehash_state
;
923 using pair_type
= std::pair
<bool, std::size_t>;
925 size_type __n_elt
= __detail::__distance_fw(__first
, __last
);
929 __hashtable
& __h
= _M_conjure_hashtable();
930 __rehash_type
& __rehash
= __h
._M_rehash_policy
;
931 const __rehash_state
& __saved_state
= __rehash
._M_state();
932 pair_type __do_rehash
= __rehash
._M_need_rehash(__h
._M_bucket_count
,
933 __h
._M_element_count
,
936 if (__do_rehash
.first
)
937 __h
._M_rehash(__do_rehash
.second
, __saved_state
);
939 for (; __first
!= __last
; ++__first
)
940 __h
._M_insert(*__first
, __node_gen
, __uks
);
/**
 *  Primary class template _Insert.
 *
 *  Defines @c insert member functions that depend on _Hashtable policies,
 *  via partial specializations.
 */
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits,
         bool _Constant_iterators = _Traits::__constant_iterators::value>
  struct _Insert;
957 template<typename _Key
, typename _Value
, typename _Alloc
,
958 typename _ExtractKey
, typename _Equal
,
959 typename _Hash
, typename _RangeHash
, typename _Unused
,
960 typename _RehashPolicy
, typename _Traits
>
961 struct _Insert
<_Key
, _Value
, _Alloc
, _ExtractKey
, _Equal
,
962 _Hash
, _RangeHash
, _Unused
,
963 _RehashPolicy
, _Traits
, true>
964 : public _Insert_base
<_Key
, _Value
, _Alloc
, _ExtractKey
, _Equal
,
965 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
>
967 using __base_type
= _Insert_base
<_Key
, _Value
, _Alloc
, _ExtractKey
,
968 _Equal
, _Hash
, _RangeHash
, _Unused
,
969 _RehashPolicy
, _Traits
>;
971 using value_type
= typename
__base_type::value_type
;
972 using iterator
= typename
__base_type::iterator
;
973 using const_iterator
= typename
__base_type::const_iterator
;
974 using __ireturn_type
= typename
__base_type::__ireturn_type
;
976 using __unique_keys
= typename
__base_type::__unique_keys
;
977 using __hashtable
= typename
__base_type::__hashtable
;
978 using __node_gen_type
= typename
__base_type::__node_gen_type
;
980 using __base_type::insert
;
983 insert(value_type
&& __v
)
985 __hashtable
& __h
= this->_M_conjure_hashtable();
986 __node_gen_type
__node_gen(__h
);
987 return __h
._M_insert(std::move(__v
), __node_gen
, __unique_keys
{});
991 insert(const_iterator __hint
, value_type
&& __v
)
993 __hashtable
& __h
= this->_M_conjure_hashtable();
994 __node_gen_type
__node_gen(__h
);
995 return __h
._M_insert(__hint
, std::move(__v
), __node_gen
,
1001 template<typename _Key
, typename _Value
, typename _Alloc
,
1002 typename _ExtractKey
, typename _Equal
,
1003 typename _Hash
, typename _RangeHash
, typename _Unused
,
1004 typename _RehashPolicy
, typename _Traits
>
1005 struct _Insert
<_Key
, _Value
, _Alloc
, _ExtractKey
, _Equal
,
1006 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
, false>
1007 : public _Insert_base
<_Key
, _Value
, _Alloc
, _ExtractKey
, _Equal
,
1008 _Hash
, _RangeHash
, _Unused
, _RehashPolicy
, _Traits
>
1010 using __base_type
= _Insert_base
<_Key
, _Value
, _Alloc
, _ExtractKey
,
1011 _Equal
, _Hash
, _RangeHash
, _Unused
,
1012 _RehashPolicy
, _Traits
>;
1013 using value_type
= typename
__base_type::value_type
;
1014 using iterator
= typename
__base_type::iterator
;
1015 using const_iterator
= typename
__base_type::const_iterator
;
1017 using __unique_keys
= typename
__base_type::__unique_keys
;
1018 using __hashtable
= typename
__base_type::__hashtable
;
1019 using __ireturn_type
= typename
__base_type::__ireturn_type
;
1021 using __base_type::insert
;
1023 template<typename _Pair
>
1024 using __is_cons
= std::is_constructible
<value_type
, _Pair
&&>;
1026 template<typename _Pair
>
1027 using _IFcons
= std::enable_if
<__is_cons
<_Pair
>::value
>;
1029 template<typename _Pair
>
1030 using _IFconsp
= typename _IFcons
<_Pair
>::type
;
1032 template<typename _Pair
, typename
= _IFconsp
<_Pair
>>
1036 __hashtable
& __h
= this->_M_conjure_hashtable();
1037 return __h
._M_emplace(__unique_keys
{}, std::forward
<_Pair
>(__v
));
1040 template<typename _Pair
, typename
= _IFconsp
<_Pair
>>
1042 insert(const_iterator __hint
, _Pair
&& __v
)
1044 __hashtable
& __h
= this->_M_conjure_hashtable();
1045 return __h
._M_emplace(__hint
, __unique_keys
{},
1046 std::forward
<_Pair
>(__v
));
// Detection alias: names a policy's __has_load_factor typedef; used
// with __detected_or_t to pick the matching _Rehash_base specialization.
template<typename _Policy>
  using __has_load_factor = typename _Policy::__has_load_factor;
1054 * Primary class template _Rehash_base.
1056 * Give hashtable the max_load_factor functions and reserve iff the
1057 * rehash policy supports it.
1059 template<typename _Key
, typename _Value
, typename _Alloc
,
1060 typename _ExtractKey
, typename _Equal
,
1061 typename _Hash
, typename _RangeHash
, typename _Unused
,
1062 typename _RehashPolicy
, typename _Traits
,
1064 __detected_or_t
<false_type
, __has_load_factor
, _RehashPolicy
>>
1065 struct _Rehash_base
;
  /// Specialization when rehash policy doesn't provide load factor management.
  // Empty base: no max_load_factor/reserve members are exposed.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
			false_type /* Has load factor */>
    {
    };
  /// Specialization when rehash policy provide load factor management.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
			true_type /* Has load factor */>
    {
    private:
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
				     _Equal, _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

    public:
      // CRTP-style downcast: this base is only ever part of a __hashtable,
      // so the static_cast below is safe.
      float
      max_load_factor() const noexcept
      {
	const __hashtable* __this = static_cast<const __hashtable*>(this);
	return __this->__rehash_policy().max_load_factor();
      }

      void
      max_load_factor(float __z)
      {
	__hashtable* __this = static_cast<__hashtable*>(this);
	// Replace the whole policy object to install the new factor.
	__this->__rehash_policy(_RehashPolicy(__z));
      }

      // Prepare for at least __n elements without violating the load factor.
      void
      reserve(std::size_t __n)
      {
	__hashtable* __this = static_cast<__hashtable*>(this);
	__this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
      }
    };
  /**
   *  Primary class template _Hashtable_ebo_helper.
   *
   *  Helper class using EBO when it is not forbidden (the type is not
   *  final) and when it is worth it (the type is empty.)
   */
  template<int _Nm, typename _Tp,
	   bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
    struct _Hashtable_ebo_helper;
  /// Specialization using EBO.
  // Derives privately from _Tp so an empty _Tp adds no storage;
  // _Nm disambiguates multiple bases of the same type.
  template<int _Nm, typename _Tp>
    struct _Hashtable_ebo_helper<_Nm, _Tp, true>
    : private _Tp
    {
      _Hashtable_ebo_helper() = default;

      template<typename _OtherTp>
	_Hashtable_ebo_helper(_OtherTp&& __tp)
	: _Tp(std::forward<_OtherTp>(__tp))
	{ }

      // Accessors recover the base subobject by upcast.
      const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
      _Tp& _M_get() { return static_cast<_Tp&>(*this); }
    };
  /// Specialization not using EBO.
  // Used when _Tp is final or non-empty; stores a plain data member.
  template<int _Nm, typename _Tp>
    struct _Hashtable_ebo_helper<_Nm, _Tp, false>
    {
      _Hashtable_ebo_helper() = default;

      template<typename _OtherTp>
	_Hashtable_ebo_helper(_OtherTp&& __tp)
	: _M_tp(std::forward<_OtherTp>(__tp))
	{ }

      const _Tp& _M_cget() const { return _M_tp; }
      _Tp& _M_get() { return _M_tp; }

    private:
      _Tp _M_tp{};
    };
  /**
   *  Primary class template _Local_iterator_base.
   *
   *  Base class for local iterators, used to iterate within a bucket
   *  but not between buckets.
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __cache_hash_code>
    struct _Local_iterator_base;
  /**
   *  Primary class template _Hash_code_base.
   *
   *  Encapsulates two policy issues that aren't quite orthogonal.
   *    (1) the difference between using a ranged hash function and using
   *	  the combination of a hash function and a range-hashing function.
   *	  In the former case we don't have such things as hash codes, so
   *	  we have a dummy type as placeholder.
   *    (2) Whether or not we cache hash codes.  Caching hash codes is
   *	  meaningless if we have a ranged hash function.
   *
   *  We also put the key extraction objects here, for convenience.
   *  Each specialization derives from one or more of the template
   *  parameters to benefit from Ebo. This is important as this type
   *  is inherited in some cases by the _Local_iterator_base type used
   *  to implement local_iterator and const_local_iterator. As with
   *  any iterator type we prefer to make it as small as possible.
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __cache_hash_code>
    struct _Hash_code_base
    : private _Hashtable_ebo_helper<1, _Hash>
    {
    private:
      using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;

      // Gives the local iterator implementation access to _M_bucket_index().
      friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
					 _Hash, _RangeHash, _Unused, false>;

    public:
      typedef _Hash					hasher;

      hasher
      hash_function() const
      { return _M_hash(); }

    protected:
      typedef std::size_t				__hash_code;

      // We need the default constructor for the local iterators and _Hashtable
      // default constructor.
      _Hash_code_base() = default;

      _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }

      // Compute the hash code of a key; statically checks the hasher
      // is callable with the key type.
      __hash_code
      _M_hash_code(const _Key& __k) const
      {
	static_assert(__is_invocable<const _Hash&, const _Key&>{},
	    "hash function must be invocable with an argument of key type");
	return _M_hash()(__k);
      }

      // Map a hash code to a bucket index via the range-hashing policy.
      std::size_t
      _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
      { return _RangeHash{}(__c, __bkt_count); }

      // Bucket index of a node without a cached hash code: the key must
      // be re-hashed.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
		      std::size_t __bkt_count) const
	noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
		  && noexcept(declval<const _RangeHash&>()((__hash_code)0,
							   (std::size_t)0)) )
      {
	return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
			    __bkt_count);
      }

      // Bucket index of a node with a cached hash code: no re-hashing.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
		      std::size_t __bkt_count) const
	noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
							(std::size_t)0)) )
      { return _RangeHash{}(__n._M_hash_code, __bkt_count); }

      // No-ops when nodes carry no hash-code cache.
      void
      _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
      { }

      void
      _M_copy_code(_Hash_node_code_cache<false>&,
		   const _Hash_node_code_cache<false>&) const
      { }

      void
      _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
      { __n._M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node_code_cache<true>& __to,
		   const _Hash_node_code_cache<true>& __from) const
      { __to._M_hash_code = __from._M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }

      const _Hash&
      _M_hash() const { return __ebo_hash::_M_cget(); }
    };
  /// Partial specialization used when nodes contain a cached hash code.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    struct _Local_iterator_base<_Key, _Value, _ExtractKey,
				_Hash, _RangeHash, _Unused, true>
    : public _Node_iterator_base<_Value, true>
    {
    protected:
      using __base_node_iter = _Node_iterator_base<_Value, true>;
      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, true>;

      _Local_iterator_base() = default;
      // The __hash_code_base is unused here: the cached code in each node
      // is enough to recompute bucket indices.
      _Local_iterator_base(const __hash_code_base&,
			   _Hash_node<_Value, true>* __p,
			   std::size_t __bkt, std::size_t __bkt_count)
      : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
      { }

      // Advance within the bucket; becomes the past-the-end (null) iterator
      // when the next node hashes to a different bucket.
      void
      _M_incr()
      {
	__base_node_iter::_M_incr();
	if (this->_M_cur)
	  {
	    std::size_t __bkt
	      = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
	    if (__bkt != _M_bucket)
	      this->_M_cur = nullptr;
	  }
      }

      std::size_t _M_bucket;
      std::size_t _M_bucket_count;

    public:
      std::size_t
      _M_get_bucket() const { return _M_bucket; } // for debug mode
    };
  // Uninitialized storage for a _Hash_code_base.
  // This type is DefaultConstructible and Assignable even if the
  // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
  // can be DefaultConstructible and Assignable.
  template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
    struct _Hash_code_storage
    {
      // Raw, suitably-aligned storage; the object is constructed/destroyed
      // manually by the owning iterator (_M_init/_M_destroy).
      __gnu_cxx::__aligned_buffer<_Tp> _M_storage;

      _Tp*
      _M_h() { return _M_storage._M_ptr(); }

      const _Tp*
      _M_h() const { return _M_storage._M_ptr(); }
    };
  // Empty partial specialization for empty _Hash_code_base types.
  template<typename _Tp>
    struct _Hash_code_storage<_Tp, true>
    {
      static_assert( std::is_empty<_Tp>::value, "Type must be empty" );

      // As _Tp is an empty type there will be no bytes written/read through
      // the cast pointer, so no strict-aliasing violation.
      _Tp*
      _M_h() { return reinterpret_cast<_Tp*>(this); }

      const _Tp*
      _M_h() const { return reinterpret_cast<const _Tp*>(this); }
    };
  // Storage wrapper for the non-caching _Hash_code_base, used as a base
  // of the non-caching local iterator below.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    using __hash_code_for_local_iter
      = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
					   _Hash, _RangeHash, _Unused, false>>;
1347 // Partial specialization used when hash codes are not cached
1348 template<typename _Key
, typename _Value
, typename _ExtractKey
,
1349 typename _Hash
, typename _RangeHash
, typename _Unused
>
1350 struct _Local_iterator_base
<_Key
, _Value
, _ExtractKey
,
1351 _Hash
, _RangeHash
, _Unused
, false>
1352 : __hash_code_for_local_iter
<_Key
, _Value
, _ExtractKey
, _Hash
, _RangeHash
,
1354 , _Node_iterator_base
<_Value
, false>
1357 using __hash_code_base
= _Hash_code_base
<_Key
, _Value
, _ExtractKey
,
1358 _Hash
, _RangeHash
, _Unused
, false>;
1359 using __node_iter_base
= _Node_iterator_base
<_Value
, false>;
1361 _Local_iterator_base() : _M_bucket_count(-1) { }
1363 _Local_iterator_base(const __hash_code_base
& __base
,
1364 _Hash_node
<_Value
, false>* __p
,
1365 std::size_t __bkt
, std::size_t __bkt_count
)
1366 : __node_iter_base(__p
), _M_bucket(__bkt
), _M_bucket_count(__bkt_count
)
1367 { _M_init(__base
); }
1369 ~_Local_iterator_base()
1371 if (_M_bucket_count
!= size_t(-1))
1375 _Local_iterator_base(const _Local_iterator_base
& __iter
)
1376 : __node_iter_base(__iter
._M_cur
), _M_bucket(__iter
._M_bucket
)
1377 , _M_bucket_count(__iter
._M_bucket_count
)
1379 if (_M_bucket_count
!= size_t(-1))
1380 _M_init(*__iter
._M_h());
1383 _Local_iterator_base
&
1384 operator=(const _Local_iterator_base
& __iter
)
1386 if (_M_bucket_count
!= -1)
1388 this->_M_cur
= __iter
._M_cur
;
1389 _M_bucket
= __iter
._M_bucket
;
1390 _M_bucket_count
= __iter
._M_bucket_count
;
1391 if (_M_bucket_count
!= -1)
1392 _M_init(*__iter
._M_h());
1399 __node_iter_base::_M_incr();
1402 std::size_t __bkt
= this->_M_h()->_M_bucket_index(*this->_M_cur
,
1404 if (__bkt
!= _M_bucket
)
1405 this->_M_cur
= nullptr;
1409 std::size_t _M_bucket
;
1410 std::size_t _M_bucket_count
;
1413 _M_init(const __hash_code_base
& __base
)
1414 { ::new(this->_M_h()) __hash_code_base(__base
); }
1417 _M_destroy() { this->_M_h()->~__hash_code_base(); }
1421 _M_get_bucket() const { return _M_bucket
; } // for debug mode
  /// local iterators
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      typedef _Value					value_type;
      // Sets (constant iterators) never expose mutable access.
      typedef typename std::conditional<__constant_iterators,
					const value_type*, value_type*>::type
						       pointer;
      typedef typename std::conditional<__constant_iterators,
					const value_type&, value_type&>::type
						       reference;
      typedef std::ptrdiff_t				difference_type;
      typedef std::forward_iterator_tag			iterator_category;

      _Local_iterator() = default;

      _Local_iterator(const __hash_code_base& __base,
		      _Hash_node<_Value, __cache>* __n,
		      std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_iterator
      operator++(int)
      {
	_Local_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
  /// local const_iterators
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_const_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      typedef _Value					value_type;
      typedef const value_type*				pointer;
      typedef const value_type&				reference;
      typedef std::ptrdiff_t				difference_type;
      typedef std::forward_iterator_tag			iterator_category;

      _Local_const_iterator() = default;

      _Local_const_iterator(const __hash_code_base& __base,
			    _Hash_node<_Value, __cache>* __n,
			    std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      // Implicit conversion from the mutable local iterator.
      _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
						  _Hash, _RangeHash, _Unused,
						  __constant_iterators,
						  __cache>& __x)
      : __base_type(__x)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_const_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_const_iterator
      operator++(int)
      {
	_Local_const_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
  /**
   *  Primary class template _Hashtable_base.
   *
   *  Helper class adding management of _Equal functor to
   *  _Hash_code_base type.
   *
   *  Base class templates are:
   *    - __detail::_Hash_code_base
   *    - __detail::_Hashtable_ebo_helper
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Equal, typename _Hash, typename _RangeHash,
	   typename _Unused, typename _Traits>
    struct _Hashtable_base
    : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
			     _Unused, _Traits::__hash_cached::value>,
      private _Hashtable_ebo_helper<0, _Equal>
    {
    public:
      typedef _Key					key_type;
      typedef _Value					value_type;
      typedef _Equal					key_equal;
      typedef std::size_t				size_type;
      typedef std::ptrdiff_t				difference_type;

      using __traits_type = _Traits;
      using __hash_cached = typename __traits_type::__hash_cached;

      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused,
					       __hash_cached::value>;

      using __hash_code = typename __hash_code_base::__hash_code;

    private:
      using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;

      // Without a cached code there is nothing to pre-filter on: always
      // fall through to the full key comparison.
      static bool
      _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
      { return true; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<false>&,
		     const _Hash_node_code_cache<false>&)
      { return true; }

      // With cached codes, a cheap code comparison short-circuits the
      // potentially expensive key comparison.
      static bool
      _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
      { return __c == __n._M_hash_code; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
		     const _Hash_node_code_cache<true>& __rhn)
      { return __lhn._M_hash_code == __rhn._M_hash_code; }

    protected:
      _Hashtable_base() = default;

      _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
      : __hash_code_base(__hash), _EqualEBO(__eq)
      { }

      // True iff node __n holds a key equal to __k (code __c pre-filters
      // when codes are cached).
      bool
      _M_equals(const _Key& __k, __hash_code __c,
		const _Hash_node_value<_Value, __hash_cached::value>& __n) const
      {
	static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
	  "key equality predicate must be invocable with two arguments of "
	  "key type");
	return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
      }

      bool
      _M_node_equals(
	const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
	const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
      {
	return _S_node_equals(__lhn, __rhn)
	  && _M_eq()(_ExtractKey{}(__lhn._M_v()), _ExtractKey{}(__rhn._M_v()));
      }

      void
      _M_swap(_Hashtable_base& __x)
      {
	__hash_code_base::_M_swap(__x);
	std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
      }

      const _Equal&
      _M_eq() const { return _EqualEBO::_M_cget(); }
    };
  /**
   *  Primary class template  _Equality.
   *
   *  This is for implementing equality comparison for unordered
   *  containers, per N3068, by John Lakos and Pablo Halpern.
   *  Algorithmically, we follow closely the reference implementations
   *  therein.
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Equality;
  /// unordered_map and unordered_set specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Defined out-of-line below.
      bool
      _M_equal(const __hashtable&) const;
    };
  // Equality for unique-key containers: sizes must match and every element
  // of *this must be found (by operator==) in __other's corresponding bucket.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
	{
	  // Locate the bucket in __other that would hold *__itx.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __prev_n = __other._M_buckets[__ybkt];
	  if (!__prev_n)
	    return false;

	  // Scan the singly-linked bucket chain for a matching element.
	  for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
	       __n = __n->_M_next())
	    {
	      if (__n->_M_v() == *__itx)
		break;

	      // Stop when the chain ends or leaves bucket __ybkt.
	      if (!__n->_M_nxt
		  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
		return false;
	    }
	}

      return true;
    }
  /// unordered_multiset and unordered_multimap specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Defined out-of-line below.
      bool
      _M_equal(const __hashtable&) const;
    };
  // Equality for multi-key containers: each run of equivalent keys in *this
  // must be a permutation of the matching run in __other.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
	{
	  // [__itx, __itx_end) is the run of elements with equivalent keys;
	  // __x_count is its length.
	  std::size_t __x_count = 1;
	  auto __itx_end = __itx;
	  for (++__itx_end; __itx_end != __this->end()
		 && __this->key_eq()(_ExtractKey{}(*__itx),
				     _ExtractKey{}(*__itx_end));
	       ++__itx_end)
	    ++__x_count;

	  // Find the start of the equivalent-key run in __other's bucket.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __y_prev_n = __other._M_buckets[__ybkt];
	  if (!__y_prev_n)
	    return false;

	  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
	  for (;;)
	    {
	      if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
				   _ExtractKey{}(*__itx)))
		break;

	      // Skip past the rest of this equivalent-key run.
	      auto __y_ref_n = __y_n;
	      for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
		if (!__other._M_node_equals(*__y_ref_n, *__y_n))
		  break;

	      if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
		return false;
	    }

	  // __other must hold at least __x_count elements after __ity.
	  typename __hashtable::const_iterator __ity(__y_n);
	  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
	    if (--__x_count == 0)
	      break;

	  if (__x_count != 0)
	    return false;

	  if (!std::is_permutation(__itx, __itx_end, __ity))
	    return false;

	  __itx = __itx_end;
	}
      return true;
    }
  /**
   * This type deals with all allocation and keeps an allocator instance
   * through inheritance to benefit from EBO when possible.
   */
  template<typename _NodeAlloc>
    struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
    {
    private:
      using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;

    public:
      using __node_type = typename _NodeAlloc::value_type;
      using __node_alloc_type = _NodeAlloc;
      // Use __gnu_cxx to benefit from _S_always_equal and al.
      using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;

      using __value_alloc_traits = typename __node_alloc_traits::template
	rebind_traits<typename __node_type::value_type>;

      using __node_ptr = __node_type*;
      using __node_base = _Hash_node_base;
      using __node_base_ptr = __node_base*;
      // Bucket array elements are pointers to node bases.
      using __buckets_alloc_type =
	__alloc_rebind<__node_alloc_type, __node_base_ptr>;
      using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
      using __buckets_ptr = __node_base_ptr*;

      _Hashtable_alloc() = default;
      _Hashtable_alloc(const _Hashtable_alloc&) = default;
      _Hashtable_alloc(_Hashtable_alloc&&) = default;

      template<typename _Alloc>
	_Hashtable_alloc(_Alloc&& __a)
	: __ebo_node_alloc(std::forward<_Alloc>(__a))
	{ }

      __node_alloc_type&
      _M_node_allocator()
      { return __ebo_node_alloc::_M_get(); }

      const __node_alloc_type&
      _M_node_allocator() const
      { return __ebo_node_alloc::_M_cget(); }

      // Allocate a node and construct an element within it.
      template<typename... _Args>
	__node_ptr
	_M_allocate_node(_Args&&... __args);

      // Destroy the element within a node and deallocate the node.
      void
      _M_deallocate_node(__node_ptr __n);

      // Deallocate a node.
      void
      _M_deallocate_node_ptr(__node_ptr __n);

      // Deallocate the linked list of nodes pointed to by __n.
      // The elements within the nodes are destroyed.
      void
      _M_deallocate_nodes(__node_ptr __n);

      __buckets_ptr
      _M_allocate_buckets(std::size_t __bkt_count);

      void
      _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
    };
  // Definitions of class template _Hashtable_alloc's out-of-line member
  // functions.
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
	auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
	__node_ptr __n = std::__to_address(__nptr);
	__try
	  {
	    // Construct the node header first, then the value in place;
	    // if either throws, give the memory back before rethrowing.
	    ::new ((void*)__n) __node_type;
	    __node_alloc_traits::construct(_M_node_allocator(),
					   __n->_M_valptr(),
					   std::forward<_Args>(__args)...);
	    return __n;
	  }
	__catch(...)
	  {
	    __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
	    __throw_exception_again;
	  }
      }
  // Destroy the stored value, then release the node itself.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
  // Destroy the node header (value already destroyed) and return the
  // memory to the node allocator, converting back to the allocator's
  // (possibly fancy) pointer type first.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
    {
      typedef typename __node_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
      __n->~__node_type();
      __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
    }
1885 template<typename _NodeAlloc
>
1887 _Hashtable_alloc
<_NodeAlloc
>::_M_deallocate_nodes(__node_ptr __n
)
1891 __node_ptr __tmp
= __n
;
1892 __n
= __n
->_M_next();
1893 _M_deallocate_node(__tmp
);
  // Allocate and zero-initialize an array of __bkt_count bucket pointers,
  // using an allocator rebound from the node allocator.
  template<typename _NodeAlloc>
    auto
    _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
    -> __buckets_ptr
    {
      __buckets_alloc_type __alloc(_M_node_allocator());

      auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
      __buckets_ptr __p = std::__to_address(__ptr);
      // All buckets start out empty (null).
      __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
      return __p;
    }
  // Release a bucket array previously obtained from _M_allocate_buckets,
  // converting the raw pointer back to the allocator's pointer type.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::
    _M_deallocate_buckets(__buckets_ptr __bkts,
			  std::size_t __bkt_count)
    {
      typedef typename __buckets_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
      __buckets_alloc_type __alloc(_M_node_allocator());
      __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
    }
  ///@} hashtable-detail
1923 } // namespace __detail
1924 _GLIBCXX_END_NAMESPACE_VERSION
1927 #endif // _HASHTABLE_POLICY_H