// libstdc++ — doxygen source listing of bits/hashtable_policy.h
// (rendered "Go to the documentation of this file" page).
1// Internal policy header for unordered_set and unordered_map -*- C++ -*-
2
3// Copyright (C) 2010-2022 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file bits/hashtable_policy.h
26 * This is an internal header file, included by other library headers.
27 * Do not attempt to use it directly.
28 * @headername{unordered_map,unordered_set}
29 */
30
31#ifndef _HASHTABLE_POLICY_H
32#define _HASHTABLE_POLICY_H 1
33
34#include <tuple> // for std::tuple, std::forward_as_tuple
35#include <bits/stl_algobase.h> // for std::min, std::is_permutation.
36#include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
37
38namespace std _GLIBCXX_VISIBILITY(default)
39{
40_GLIBCXX_BEGIN_NAMESPACE_VERSION
41/// @cond undocumented
42
  // Forward declaration of the hash-table class template implementing the
  // unordered containers; its definition is not in this header.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    class _Hashtable;
48
49namespace __detail
50{
51 /**
52 * @defgroup hashtable-detail Base and Implementation Classes
53 * @ingroup unordered_associative_containers
54 * @{
55 */
  // Forward declaration; _Hashtable_base provides the hash-code and
  // key-equality machinery used by _Hashtable (defined later).
  template<typename _Key, typename _Value, typename _ExtractKey,
           typename _Equal, typename _Hash, typename _RangeHash,
           typename _Unused, typename _Traits>
    struct _Hashtable_base;
60
61 // Helper function: return distance(first, last) for forward
62 // iterators, or 0/1 for input iterators.
63 template<typename _Iterator>
65 __distance_fw(_Iterator __first, _Iterator __last,
67 { return __first != __last ? 1 : 0; }
68
69 template<typename _Iterator>
71 __distance_fw(_Iterator __first, _Iterator __last,
73 { return std::distance(__first, __last); }
74
75 template<typename _Iterator>
77 __distance_fw(_Iterator __first, _Iterator __last)
78 { return __distance_fw(__first, __last,
79 std::__iterator_category(__first)); }
80
  // Key-extraction functor for sets: the stored value is itself the key,
  // so perfect-forward the argument unchanged.
  struct _Identity
  {
    template<typename _Tp>
      _Tp&&
      operator()(_Tp&& __x) const noexcept
      { return std::forward<_Tp>(__x); }
  };
88
  // Key-extraction functor for maps: returns the .first member of a
  // std::pair, preserving const and reference qualifications.
  struct _Select1st
  {
    // Metafunction mapping a (possibly const, possibly reference) pair
    // type to the matching type of its first member.
    template<typename _Pair>
      struct __1st_type;

    template<typename _Tp, typename _Up>
      struct __1st_type<pair<_Tp, _Up>>
      { using type = _Tp; };

    template<typename _Tp, typename _Up>
      struct __1st_type<const pair<_Tp, _Up>>
      { using type = const _Tp; };

    // Lvalue references to pairs yield references to the first member.
    template<typename _Pair>
      struct __1st_type<_Pair&>
      { using type = typename __1st_type<_Pair>::type&; };

    template<typename _Tp>
      typename __1st_type<_Tp>::type&&
      operator()(_Tp&& __x) const noexcept
      { return std::forward<_Tp>(__x).first; }
  };
111
  // Primary template; specialized below for each key-extraction policy to
  // build a node from a key plus the remaining value parts.
  template<typename _ExKey>
    struct _NodeBuilder;
114
  // Map case: the node's value is a pair, built from the key and the
  // .second member of the supplied pair-like argument.
  template<>
    struct _NodeBuilder<_Select1st>
    {
      template<typename _Kt, typename _Arg, typename _NodeGenerator>
        static auto
        _S_build(_Kt&& __k, _Arg&& __arg, const _NodeGenerator& __node_gen)
        -> typename _NodeGenerator::__node_type*
        {
          return __node_gen(std::forward<_Kt>(__k),
                            std::forward<_Arg>(__arg).second);
        }
    };
127
  // Set case: the key is the whole value, so the second argument is unused.
  template<>
    struct _NodeBuilder<_Identity>
    {
      template<typename _Kt, typename _Arg, typename _NodeGenerator>
        static auto
        _S_build(_Kt&& __k, _Arg&&, const _NodeGenerator& __node_gen)
        -> typename _NodeGenerator::__node_type*
        { return __node_gen(std::forward<_Kt>(__k)); }
    };
137
  // Forward declaration of the allocation layer used by the node
  // functors below; defined later.
  template<typename _NodeAlloc>
    struct _Hashtable_alloc;
140
  // Functor recycling a pool of nodes and using allocation once the pool is
  // empty.
  template<typename _NodeAlloc>
    struct _ReuseOrAllocNode
    {
    private:
      using __node_alloc_type = _NodeAlloc;
      using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
      using __node_alloc_traits =
        typename __hashtable_alloc::__node_alloc_traits;

    public:
      using __node_type = typename __hashtable_alloc::__node_type;

      // Takes ownership of the singly-linked list of spare nodes
      // __nodes; whatever is left unconsumed is deallocated on
      // destruction.  Non-copyable: the pool must have a single owner.
      _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
      : _M_nodes(__nodes), _M_h(__h) { }
      _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;

      // Release any nodes remaining in the pool.
      ~_ReuseOrAllocNode()
      { _M_h._M_deallocate_nodes(_M_nodes); }

      // Produce a node whose value is constructed from __args: reuse the
      // front node of the pool if available, otherwise allocate afresh.
      template<typename... _Args>
        __node_type*
        operator()(_Args&&... __args) const
        {
          if (_M_nodes)
            {
              // Detach the first pooled node and destroy its old value
              // before constructing the new one in place.
              __node_type* __node = _M_nodes;
              _M_nodes = _M_nodes->_M_next();
              __node->_M_nxt = nullptr;
              auto& __a = _M_h._M_node_allocator();
              __node_alloc_traits::destroy(__a, __node->_M_valptr());
              __try
                {
                  __node_alloc_traits::construct(__a, __node->_M_valptr(),
                                                 std::forward<_Args>(__args)...);
                }
              __catch(...)
                {
                  // The node holds no value now; free its storage before
                  // propagating the exception.
                  _M_h._M_deallocate_node_ptr(__node);
                  __throw_exception_again;
                }
              return __node;
            }
          return _M_h._M_allocate_node(std::forward<_Args>(__args)...);
        }

    private:
      // mutable: operator() is const but consumes the pool.
      mutable __node_type* _M_nodes;
      __hashtable_alloc& _M_h;
    };
192
  // Functor similar to the previous one but without any pool of nodes to
  // recycle: every call allocates a fresh node.
  template<typename _NodeAlloc>
    struct _AllocNode
    {
    private:
      using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;

    public:
      using __node_type = typename __hashtable_alloc::__node_type;

      _AllocNode(__hashtable_alloc& __h)
      : _M_h(__h) { }

      // Allocate and construct a node holding a value built from __args.
      template<typename... _Args>
        __node_type*
        operator()(_Args&&... __args) const
        { return _M_h._M_allocate_node(std::forward<_Args>(__args)...); }

    private:
      __hashtable_alloc& _M_h;
    };
215
216 // Auxiliary types used for all instantiations of _Hashtable nodes
217 // and iterators.
218
  /**
   *  struct _Hashtable_traits
   *
   *  Important traits for hash tables.  Each boolean template parameter
   *  is re-exported as a __bool_constant typedef so that _Hashtable and
   *  its bases can tag-dispatch on it.
   *
   *  @tparam _Cache_hash_code  Boolean value. True if the value of
   *  the hash function is stored along with the value. This is a
   *  time-space tradeoff.  Storing it may improve lookup speed by
   *  reducing the number of times we need to call the _Hash or _Equal
   *  functors.
   *
   *  @tparam _Constant_iterators  Boolean value. True if iterator and
   *  const_iterator are both constant iterator types. This is true
   *  for unordered_set and unordered_multiset, false for
   *  unordered_map and unordered_multimap.
   *
   *  @tparam _Unique_keys  Boolean value. True if the return value
   *  of _Hashtable::count(k) is always at most one, false if it may
   *  be an arbitrary number. This is true for unordered_set and
   *  unordered_map, false for unordered_multiset and
   *  unordered_multimap.
   */
  template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
    struct _Hashtable_traits
    {
      using __hash_cached = __bool_constant<_Cache_hash_code>;
      using __constant_iterators = __bool_constant<_Constant_iterators>;
      using __unique_keys = __bool_constant<_Unique_keys>;
    };
248
  /**
   *  struct _Hashtable_hash_traits
   *
   *  Important traits for hash tables depending on associated hasher.
   *
   *  Yields 0 for hashers marked "fast" by __is_fast_hash and 20
   *  otherwise.  NOTE(review): presumably a container-size threshold
   *  below which callers prefer linear scan over hashing — confirm
   *  against the uses in _Hashtable.
   */
  template<typename _Hash>
    struct _Hashtable_hash_traits
    {
      static constexpr std::size_t
      __small_size_threshold() noexcept
      { return std::__is_fast_hash<_Hash>::value ? 0 : 20; }
    };
262
  /**
   *  struct _Hash_node_base
   *
   *  Nodes, used to wrap elements stored in the hash table.  A policy
   *  template parameter of class template _Hashtable controls whether
   *  nodes also store a hash code. In some cases (e.g. strings) this
   *  may be a performance win.
   *
   *  This base holds only the intrusive singly-linked-list pointer;
   *  the default constructor leaves it null.
   */
  struct _Hash_node_base
  {
    _Hash_node_base* _M_nxt;

    _Hash_node_base() noexcept : _M_nxt() { }

    _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
  };
279
  /**
   *  struct _Hash_node_value_base
   *
   *  Node type with the value to store.  The value lives in raw aligned
   *  storage so its construction/destruction can be managed explicitly
   *  by the allocator layer (see _ReuseOrAllocNode).
   */
  template<typename _Value>
    struct _Hash_node_value_base
    {
      typedef _Value value_type;

      __gnu_cxx::__aligned_buffer<_Value> _M_storage;

      // Pointer to the (possibly not-yet-constructed) value.
      _Value*
      _M_valptr() noexcept
      { return _M_storage._M_ptr(); }

      const _Value*
      _M_valptr() const noexcept
      { return _M_storage._M_ptr(); }

      // Reference to the stored value; only valid once constructed.
      _Value&
      _M_v() noexcept
      { return *_M_valptr(); }

      const _Value&
      _M_v() const noexcept
      { return *_M_valptr(); }
    };
308
  /**
   *  Primary template struct _Hash_node_code_cache.
   *  Empty when the hash code is not cached in the node.
   */
  template<bool _Cache_hash_code>
    struct _Hash_node_code_cache
    { };

  /**
   *  Specialization for node with cache, struct _Hash_node_code_cache.
   *  Stores the element's hash code to avoid re-hashing on rehash/lookup.
   */
  template<>
    struct _Hash_node_code_cache<true>
    { std::size_t _M_hash_code; };
322
  // Combines value storage with the (possibly empty) hash-code cache.
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node_value
    : _Hash_node_value_base<_Value>
    , _Hash_node_code_cache<_Cache_hash_code>
    { };
328
  /**
   *  Primary template struct _Hash_node.
   *  The complete node: list link + value storage (+ cached hash code).
   */
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node
    : _Hash_node_base
    , _Hash_node_value<_Value, _Cache_hash_code>
    {
      // Next node, downcast from the base-class link pointer.
      _Hash_node*
      _M_next() const noexcept
      { return static_cast<_Hash_node*>(this->_M_nxt); }
    };
341
  /// Base class for node iterators.  Wraps a node pointer; a null
  /// pointer represents the past-the-end iterator.
  template<typename _Value, bool _Cache_hash_code>
    struct _Node_iterator_base
    {
      using __node_type = _Hash_node<_Value, _Cache_hash_code>;

      __node_type* _M_cur;

      _Node_iterator_base() : _M_cur(nullptr) { }
      _Node_iterator_base(__node_type* __p) noexcept
      : _M_cur(__p) { }

      // Advance to the next node; precondition: _M_cur != nullptr.
      void
      _M_incr() noexcept
      { _M_cur = _M_cur->_M_next(); }

      friend bool
      operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
      noexcept
      { return __x._M_cur == __y._M_cur; }

#if __cpp_impl_three_way_comparison < 201907L
      // operator!= is synthesized from operator== in C++20 and later.
      friend bool
      operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
      noexcept
      { return __x._M_cur != __y._M_cur; }
#endif
    };
370
  /// Node iterators, used to iterate through all the hashtable.
  /// A forward iterator; constness of pointer/reference follows the
  /// __constant_iterators policy (true for sets, where keys are
  /// immutable).
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
    private:
      using __base_type = _Node_iterator_base<_Value, __cache>;
      using __node_type = typename __base_type::__node_type;

    public:
      using value_type = _Value;
      using difference_type = std::ptrdiff_t;
      using iterator_category = std::forward_iterator_tag;

      using pointer = __conditional_t<__constant_iterators,
                                      const value_type*, value_type*>;

      using reference = __conditional_t<__constant_iterators,
                                        const value_type&, value_type&>;

      _Node_iterator() = default;

      explicit
      _Node_iterator(__node_type* __p) noexcept
      : __base_type(__p) { }

      reference
      operator*() const noexcept
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const noexcept
      { return this->_M_cur->_M_valptr(); }

      _Node_iterator&
      operator++() noexcept
      {
        this->_M_incr();
        return *this;
      }

      _Node_iterator
      operator++(int) noexcept
      {
        _Node_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
420
  /// Node const_iterators, used to iterate through all the hashtable.
  /// Always exposes const pointer/reference; implicitly constructible
  /// from the mutable iterator.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
    private:
      using __base_type = _Node_iterator_base<_Value, __cache>;
      using __node_type = typename __base_type::__node_type;

    public:
      typedef _Value					value_type;
      typedef std::ptrdiff_t				difference_type;
      typedef std::forward_iterator_tag			iterator_category;

      typedef const value_type*				pointer;
      typedef const value_type&				reference;

      _Node_const_iterator() = default;

      explicit
      _Node_const_iterator(__node_type* __p) noexcept
      : __base_type(__p) { }

      // iterator -> const_iterator conversion.
      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
                           __cache>& __x) noexcept
      : __base_type(__x._M_cur) { }

      reference
      operator*() const noexcept
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const noexcept
      { return this->_M_cur->_M_valptr(); }

      _Node_const_iterator&
      operator++() noexcept
      {
        this->_M_incr();
        return *this;
      }

      _Node_const_iterator
      operator++(int) noexcept
      {
        _Node_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
471
472 // Many of class template _Hashtable's template parameters are policy
473 // classes. These are defaults for the policies.
474
475 /// Default range hashing function: use division to fold a large number
476 /// into the range [0, N).
477 struct _Mod_range_hashing
478 {
479 typedef std::size_t first_argument_type;
480 typedef std::size_t second_argument_type;
481 typedef std::size_t result_type;
482
483 result_type
484 operator()(first_argument_type __num,
485 second_argument_type __den) const noexcept
486 { return __num % __den; }
487 };
488
  /// Default ranged hash function H.  In principle it should be a
  /// function object composed from objects of type H1 and H2 such that
  /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  /// h1 and h2.  So instead we'll just use a tag (this empty struct) to
  /// tell class template hashtable to do that composition itself.
  struct _Default_ranged_hash { };
495
496 /// Default value for rehash policy. Bucket size is (usually) the
497 /// smallest prime that keeps the load factor small enough.
498 struct _Prime_rehash_policy
499 {
500 using __has_load_factor = true_type;
501
502 _Prime_rehash_policy(float __z = 1.0) noexcept
503 : _M_max_load_factor(__z), _M_next_resize(0) { }
504
505 float
506 max_load_factor() const noexcept
507 { return _M_max_load_factor; }
508
509 // Return a bucket size no smaller than n.
510 std::size_t
511 _M_next_bkt(std::size_t __n) const;
512
513 // Return a bucket count appropriate for n elements
514 std::size_t
515 _M_bkt_for_elements(std::size_t __n) const
516 { return __builtin_ceil(__n / (double)_M_max_load_factor); }
517
518 // __n_bkt is current bucket count, __n_elt is current element count,
519 // and __n_ins is number of elements to be inserted. Do we need to
520 // increase bucket count? If so, return make_pair(true, n), where n
521 // is the new bucket count. If not, return make_pair(false, 0).
523 _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
524 std::size_t __n_ins) const;
525
526 typedef std::size_t _State;
527
528 _State
529 _M_state() const
530 { return _M_next_resize; }
531
532 void
533 _M_reset() noexcept
534 { _M_next_resize = 0; }
535
536 void
537 _M_reset(_State __state)
538 { _M_next_resize = __state; }
539
540 static const std::size_t _S_growth_factor = 2;
541
542 float _M_max_load_factor;
543 mutable std::size_t _M_next_resize;
544 };
545
546 /// Range hashing function assuming that second arg is a power of 2.
547 struct _Mask_range_hashing
548 {
549 typedef std::size_t first_argument_type;
550 typedef std::size_t second_argument_type;
551 typedef std::size_t result_type;
552
553 result_type
554 operator()(first_argument_type __num,
555 second_argument_type __den) const noexcept
556 { return __num & (__den - 1); }
557 };
558
559 /// Compute closest power of 2 not less than __n
560 inline std::size_t
561 __clp2(std::size_t __n) noexcept
562 {
564 // Equivalent to return __n ? std::bit_ceil(__n) : 0;
565 if (__n < 2)
566 return __n;
567 const unsigned __lz = sizeof(size_t) > sizeof(long)
568 ? __builtin_clzll(__n - 1ull)
569 : __builtin_clzl(__n - 1ul);
570 // Doing two shifts avoids undefined behaviour when __lz == 0.
571 return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
572 }
573
574 /// Rehash policy providing power of 2 bucket numbers. Avoids modulo
575 /// operations.
576 struct _Power2_rehash_policy
577 {
578 using __has_load_factor = true_type;
579
580 _Power2_rehash_policy(float __z = 1.0) noexcept
581 : _M_max_load_factor(__z), _M_next_resize(0) { }
582
583 float
584 max_load_factor() const noexcept
585 { return _M_max_load_factor; }
586
587 // Return a bucket size no smaller than n (as long as n is not above the
588 // highest power of 2).
589 std::size_t
590 _M_next_bkt(std::size_t __n) noexcept
591 {
592 if (__n == 0)
593 // Special case on container 1st initialization with 0 bucket count
594 // hint. We keep _M_next_resize to 0 to make sure that next time we
595 // want to add an element allocation will take place.
596 return 1;
597
598 const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
599 const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
600 std::size_t __res = __clp2(__n);
601
602 if (__res == 0)
603 __res = __max_bkt;
604 else if (__res == 1)
605 // If __res is 1 we force it to 2 to make sure there will be an
606 // allocation so that nothing need to be stored in the initial
607 // single bucket
608 __res = 2;
609
610 if (__res == __max_bkt)
611 // Set next resize to the max value so that we never try to rehash again
612 // as we already reach the biggest possible bucket number.
613 // Note that it might result in max_load_factor not being respected.
614 _M_next_resize = size_t(-1);
615 else
616 _M_next_resize
617 = __builtin_floor(__res * (double)_M_max_load_factor);
618
619 return __res;
620 }
621
622 // Return a bucket count appropriate for n elements
623 std::size_t
624 _M_bkt_for_elements(std::size_t __n) const noexcept
625 { return __builtin_ceil(__n / (double)_M_max_load_factor); }
626
627 // __n_bkt is current bucket count, __n_elt is current element count,
628 // and __n_ins is number of elements to be inserted. Do we need to
629 // increase bucket count? If so, return make_pair(true, n), where n
630 // is the new bucket count. If not, return make_pair(false, 0).
632 _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
633 std::size_t __n_ins) noexcept
634 {
635 if (__n_elt + __n_ins > _M_next_resize)
636 {
637 // If _M_next_resize is 0 it means that we have nothing allocated so
638 // far and that we start inserting elements. In this case we start
639 // with an initial bucket size of 11.
640 double __min_bkts
641 = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
642 / (double)_M_max_load_factor;
643 if (__min_bkts >= __n_bkt)
644 return { true,
645 _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
646 __n_bkt * _S_growth_factor)) };
647
648 _M_next_resize
649 = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
650 return { false, 0 };
651 }
652 else
653 return { false, 0 };
654 }
655
656 typedef std::size_t _State;
657
658 _State
659 _M_state() const noexcept
660 { return _M_next_resize; }
661
662 void
663 _M_reset() noexcept
664 { _M_next_resize = 0; }
665
666 void
667 _M_reset(_State __state) noexcept
668 { _M_next_resize = __state; }
669
670 static const std::size_t _S_growth_factor = 2;
671
672 float _M_max_load_factor;
673 std::size_t _M_next_resize;
674 };
675
676 // Base classes for std::_Hashtable. We define these base classes
677 // because in some cases we want to do different things depending on
678 // the value of a policy class. In some cases the policy class
679 // affects which member functions and nested typedefs are defined;
680 // we handle that by specializing base class templates. Several of
681 // the base class templates need to access other members of class
682 // template _Hashtable, so we use a variant of the "Curiously
683 // Recurring Template Pattern" (CRTP) technique.
684
  /**
   *  Primary class template _Map_base.
   *
   *  If the hashtable has a value type of the form pair<const T1, T2> and
   *  a key extraction policy (_ExtractKey) that returns the first part
   *  of the pair, the hashtable gets a mapped_type typedef.  If it
   *  satisfies those criteria and also has unique keys, then it also
   *  gets an operator[].  The primary template (non-map cases) adds
   *  nothing.
   */
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits,
           bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Map_base { };
700
  /// Partial specialization, __unique_keys set to false, std::pair value type.
  /// Multimap case: provides mapped_type but no operator[].
  template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
                     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using mapped_type = _Val;
    };
710
  /// Partial specialization, __unique_keys set to true.
  /// Map case: adds operator[] and at() on top of mapped_type.
  template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
                     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
    private:
      using __hashtable_base = _Hashtable_base<_Key, pair<const _Key, _Val>,
                                               _Select1st, _Equal, _Hash,
                                               _RangeHash, _Unused,
                                               _Traits>;

      using __hashtable = _Hashtable<_Key, pair<const _Key, _Val>, _Alloc,
                                     _Select1st, _Equal, _Hash, _RangeHash,
                                     _Unused, _RehashPolicy, _Traits>;

      using __hash_code = typename __hashtable_base::__hash_code;

    public:
      using key_type = typename __hashtable_base::key_type;
      using mapped_type = _Val;

      // Insert a default-constructed mapped value if the key is absent;
      // defined out of line below.
      mapped_type&
      operator[](const key_type& __k);

      mapped_type&
      operator[](key_type&& __k);

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // DR 761. unordered_map needs an at() member function.
      // Throws std::out_of_range when the key is absent.
      mapped_type&
      at(const key_type& __k)
      {
        // CRTP downcast: this base is only ever part of a __hashtable.
        auto __ite = static_cast<__hashtable*>(this)->find(__k);
        if (!__ite._M_cur)
          __throw_out_of_range(__N("unordered_map::at"));
        return __ite->second;
      }

      const mapped_type&
      at(const key_type& __k) const
      {
        auto __ite = static_cast<const __hashtable*>(this)->find(__k);
        if (!__ite._M_cur)
          __throw_out_of_range(__N("unordered_map::at"));
        return __ite->second;
      }
    };
760
761 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
762 typename _Hash, typename _RangeHash, typename _Unused,
763 typename _RehashPolicy, typename _Traits>
764 auto
765 _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
766 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
767 operator[](const key_type& __k)
768 -> mapped_type&
769 {
770 __hashtable* __h = static_cast<__hashtable*>(this);
771 __hash_code __code = __h->_M_hash_code(__k);
772 std::size_t __bkt = __h->_M_bucket_index(__code);
773 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
774 return __node->_M_v().second;
775
776 typename __hashtable::_Scoped_node __node {
777 __h,
781 };
782 auto __pos
783 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
784 __node._M_node = nullptr;
785 return __pos->second;
786 }
787
788 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
789 typename _Hash, typename _RangeHash, typename _Unused,
790 typename _RehashPolicy, typename _Traits>
791 auto
792 _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
793 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
794 operator[](key_type&& __k)
795 -> mapped_type&
796 {
797 __hashtable* __h = static_cast<__hashtable*>(this);
798 __hash_code __code = __h->_M_hash_code(__k);
799 std::size_t __bkt = __h->_M_bucket_index(__code);
800 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
801 return __node->_M_v().second;
802
803 typename __hashtable::_Scoped_node __node {
804 __h,
808 };
809 auto __pos
810 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
811 __node._M_node = nullptr;
812 return __pos->second;
813 }
814
  // Partial specialization for unordered_map<const T, U>, see PR 104174.
  // Simply forwards to the non-const-key specialization so the same
  // mapped_type/operator[]/at members are provided.
  template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits, bool __uniq>
    struct _Map_base<const _Key, pair<const _Key, _Val>,
                     _Alloc, _Select1st, _Equal, _Hash,
                     _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
    : _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal, _Hash,
                _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
    { };
825
826 /**
827 * Primary class template _Insert_base.
828 *
829 * Defines @c insert member functions appropriate to all _Hashtables.
830 */
831 template<typename _Key, typename _Value, typename _Alloc,
832 typename _ExtractKey, typename _Equal,
833 typename _Hash, typename _RangeHash, typename _Unused,
834 typename _RehashPolicy, typename _Traits>
835 struct _Insert_base
836 {
837 protected:
838 using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
839 _Equal, _Hash, _RangeHash,
840 _Unused, _Traits>;
841
842 using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
843 _Hash, _RangeHash,
844 _Unused, _RehashPolicy, _Traits>;
845
846 using __hash_cached = typename _Traits::__hash_cached;
847 using __constant_iterators = typename _Traits::__constant_iterators;
848
849 using __hashtable_alloc = _Hashtable_alloc<
850 __alloc_rebind<_Alloc, _Hash_node<_Value,
851 __hash_cached::value>>>;
852
853 using value_type = typename __hashtable_base::value_type;
854 using size_type = typename __hashtable_base::size_type;
855
856 using __unique_keys = typename _Traits::__unique_keys;
857 using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
858 using __node_gen_type = _AllocNode<__node_alloc_type>;
859
860 __hashtable&
861 _M_conjure_hashtable()
862 { return *(static_cast<__hashtable*>(this)); }
863
864 template<typename _InputIterator, typename _NodeGetter>
865 void
866 _M_insert_range(_InputIterator __first, _InputIterator __last,
867 const _NodeGetter&, true_type __uks);
868
869 template<typename _InputIterator, typename _NodeGetter>
870 void
871 _M_insert_range(_InputIterator __first, _InputIterator __last,
872 const _NodeGetter&, false_type __uks);
873
874 public:
875 using iterator = _Node_iterator<_Value, __constant_iterators::value,
876 __hash_cached::value>;
877
878 using const_iterator = _Node_const_iterator<_Value,
879 __constant_iterators::value,
880 __hash_cached::value>;
881
882 using __ireturn_type = __conditional_t<__unique_keys::value,
884 iterator>;
885
886 __ireturn_type
887 insert(const value_type& __v)
888 {
889 __hashtable& __h = _M_conjure_hashtable();
890 __node_gen_type __node_gen(__h);
891 return __h._M_insert(__v, __node_gen, __unique_keys{});
892 }
893
894 iterator
895 insert(const_iterator __hint, const value_type& __v)
896 {
897 __hashtable& __h = _M_conjure_hashtable();
898 __node_gen_type __node_gen(__h);
899 return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
900 }
901
902 template<typename _KType, typename... _Args>
904 try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
905 {
906 __hashtable& __h = _M_conjure_hashtable();
907 auto __code = __h._M_hash_code(__k);
908 std::size_t __bkt = __h._M_bucket_index(__code);
909 if (auto __node = __h._M_find_node(__bkt, __k, __code))
910 return { iterator(__node), false };
911
912 typename __hashtable::_Scoped_node __node {
913 &__h,
915 std::forward_as_tuple(std::forward<_KType>(__k)),
916 std::forward_as_tuple(std::forward<_Args>(__args)...)
917 };
918 auto __it
919 = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
920 __node._M_node = nullptr;
921 return { __it, true };
922 }
923
924 void
925 insert(initializer_list<value_type> __l)
926 { this->insert(__l.begin(), __l.end()); }
927
928 template<typename _InputIterator>
929 void
930 insert(_InputIterator __first, _InputIterator __last)
931 {
932 __hashtable& __h = _M_conjure_hashtable();
933 __node_gen_type __node_gen(__h);
934 return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
935 }
936 };
937
  // Range insertion for unique-key containers: each element's insertion
  // already checks for duplicates, so insert one element at a time.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _InputIterator, typename _NodeGetter>
      void
      _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                   _Hash, _RangeHash, _Unused,
                   _RehashPolicy, _Traits>::
      _M_insert_range(_InputIterator __first, _InputIterator __last,
                      const _NodeGetter& __node_gen, true_type __uks)
      {
        __hashtable& __h = _M_conjure_hashtable();
        for (; __first != __last; ++__first)
          __h._M_insert(*__first, __node_gen, __uks);
      }
954
  // Range insertion for multi containers: every element will be inserted,
  // so the bucket count can be grown once up front instead of repeatedly
  // rehashing during the loop.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _InputIterator, typename _NodeGetter>
      void
      _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                   _Hash, _RangeHash, _Unused,
                   _RehashPolicy, _Traits>::
      _M_insert_range(_InputIterator __first, _InputIterator __last,
                      const _NodeGetter& __node_gen, false_type __uks)
      {
        using __rehash_type = typename __hashtable::__rehash_type;
        using __rehash_state = typename __hashtable::__rehash_state;
        using pair_type = std::pair<bool, std::size_t>;

        // Exact count for forward iterators, 0/1 for input iterators.
        size_type __n_elt = __detail::__distance_fw(__first, __last);
        if (__n_elt == 0)
          return;

        __hashtable& __h = _M_conjure_hashtable();
        __rehash_type& __rehash = __h._M_rehash_policy;
        // Saved so a failed rehash can restore the policy's state.
        const __rehash_state& __saved_state = __rehash._M_state();
        pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
                                                        __h._M_element_count,
                                                        __n_elt);

        if (__do_rehash.first)
          __h._M_rehash(__do_rehash.second, __saved_state);

        for (; __first != __last; ++__first)
          __h._M_insert(*__first, __node_gen, __uks);
      }
988
  /**
   *  Primary class template _Insert.
   *
   *  Defines @c insert member functions that depend on _Hashtable policies,
   *  via partial specializations on _Constant_iterators (below).
   */
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits,
           bool _Constant_iterators = _Traits::__constant_iterators::value>
    struct _Insert;
1001
  /// Specialization for containers with constant iterators (sets): adds
  /// rvalue insert overloads on top of _Insert_base's members.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                   _Hash, _RangeHash, _Unused,
                   _RehashPolicy, _Traits, true>
    : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                          _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
    {
      using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
                                       _Equal, _Hash, _RangeHash, _Unused,
                                       _RehashPolicy, _Traits>;

      using value_type = typename __base_type::value_type;
      using iterator = typename __base_type::iterator;
      using const_iterator =  typename __base_type::const_iterator;
      using __ireturn_type = typename __base_type::__ireturn_type;

      using __unique_keys = typename __base_type::__unique_keys;
      using __hashtable = typename __base_type::__hashtable;
      using __node_gen_type = typename __base_type::__node_gen_type;

      using __base_type::insert;

      __ireturn_type
      insert(value_type&& __v)
      {
        __hashtable& __h = this->_M_conjure_hashtable();
        __node_gen_type __node_gen(__h);
        return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
      }

      iterator
      insert(const_iterator __hint, value_type&& __v)
      {
        __hashtable& __h = this->_M_conjure_hashtable();
        __node_gen_type __node_gen(__h);
        return __h._M_insert(__hint, std::move(__v), __node_gen,
                             __unique_keys{});
      }
    };
1045
1046 /// Specialization.
1047 template<typename _Key, typename _Value, typename _Alloc,
1048 typename _ExtractKey, typename _Equal,
1049 typename _Hash, typename _RangeHash, typename _Unused,
1050 typename _RehashPolicy, typename _Traits>
1051 struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1052 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1053 : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1054 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
1055 {
1056 using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
1057 _Equal, _Hash, _RangeHash, _Unused,
1058 _RehashPolicy, _Traits>;
1059 using value_type = typename __base_type::value_type;
1060 using iterator = typename __base_type::iterator;
1061 using const_iterator = typename __base_type::const_iterator;
1062
1063 using __unique_keys = typename __base_type::__unique_keys;
1064 using __hashtable = typename __base_type::__hashtable;
1065 using __ireturn_type = typename __base_type::__ireturn_type;
1066
1067 using __base_type::insert;
1068
1069 template<typename _Pair>
1071
1072 template<typename _Pair>
1074
1075 template<typename _Pair>
1076 using _IFconsp = typename _IFcons<_Pair>::type;
1077
1078 template<typename _Pair, typename = _IFconsp<_Pair>>
1079 __ireturn_type
1080 insert(_Pair&& __v)
1081 {
1082 __hashtable& __h = this->_M_conjure_hashtable();
1083 return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
1084 }
1085
1086 template<typename _Pair, typename = _IFconsp<_Pair>>
1087 iterator
1088 insert(const_iterator __hint, _Pair&& __v)
1089 {
1090 __hashtable& __h = this->_M_conjure_hashtable();
1091 return __h._M_emplace(__hint, __unique_keys{},
1092 std::forward<_Pair>(__v));
1093 }
1094 };
1095
  // Detection alias: well-formed only if _Policy declares a nested
  // __has_load_factor type; used with __detected_or_t below to pick the
  // right _Rehash_base specialization.
  template<typename _Policy>
    using __has_load_factor = typename _Policy::__has_load_factor;
1098
  /**
   *  Primary class template _Rehash_base.
   *
   *  Give hashtable the max_load_factor functions and reserve iff the
   *  rehash policy supports it.
   *
   *  The defaulted third-to-last parameter is true_type when _RehashPolicy
   *  declares __has_load_factor, false_type otherwise.
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   typename =
	     __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
    struct _Rehash_base;
1112
  /// Specialization when rehash policy doesn't provide load factor management.
  /// Intentionally empty: the container then has no max_load_factor/reserve.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
			false_type /* Has load factor */>
    {
    };
1123
  /// Specialization when rehash policy provides load factor management.
  /// Uses the CRTP-style downcast to the full _Hashtable to reach the
  /// stored rehash policy object.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
			true_type /* Has load factor */>
    {
    private:
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
				     _Equal, _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

    public:
      // Returns the current maximum load factor of the rehash policy.
      float
      max_load_factor() const noexcept
      {
	const __hashtable* __this = static_cast<const __hashtable*>(this);
	return __this->__rehash_policy().max_load_factor();
      }

      // Replaces the rehash policy with one built from the new factor.
      void
      max_load_factor(float __z)
      {
	__hashtable* __this = static_cast<__hashtable*>(this);
	__this->__rehash_policy(_RehashPolicy(__z));
      }

      // Prepares the table for at least __n elements without exceeding
      // the max load factor.
      void
      reserve(std::size_t __n)
      {
	__hashtable* __this = static_cast<__hashtable*>(this);
	__this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
      }
    };
1160
  /**
   *  Primary class template _Hashtable_ebo_helper.
   *
   *  Helper class using EBO when it is not forbidden (the type is not
   *  final) and when it is worth it (the type is empty.)
   *
   *  _Nm only disambiguates multiple bases of the same kind.
   */
  template<int _Nm, typename _Tp,
	   bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
    struct _Hashtable_ebo_helper;
1170
  /// Specialization using EBO: the stored object is a (private) base so an
  /// empty _Tp contributes no storage.
  template<int _Nm, typename _Tp>
    struct _Hashtable_ebo_helper<_Nm, _Tp, true>
    : private _Tp
    {
      _Hashtable_ebo_helper() noexcept(noexcept(_Tp())) : _Tp() { }

      template<typename _OtherTp>
	_Hashtable_ebo_helper(_OtherTp&& __tp)
	: _Tp(std::forward<_OtherTp>(__tp))
	{ }

      // Accessors recover the stored object by casting back to the base.
      const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
      _Tp& _M_get() { return static_cast<_Tp&>(*this); }
    };
1186
  /// Specialization not using EBO: the object is kept as a data member.
  template<int _Nm, typename _Tp>
    struct _Hashtable_ebo_helper<_Nm, _Tp, false>
    {
      _Hashtable_ebo_helper() = default;

      template<typename _OtherTp>
	_Hashtable_ebo_helper(_OtherTp&& __tp)
	: _M_tp(std::forward<_OtherTp>(__tp))
	{ }

      const _Tp& _M_cget() const { return _M_tp; }
      _Tp& _M_get() { return _M_tp; }

    private:
      _Tp _M_tp{};
    };
1204
  /**
   *  Primary class template _Local_iterator_base.
   *
   *  Base class for local iterators, used to iterate within a bucket
   *  but not between buckets.
   *
   *  Specialized below on __cache_hash_code: with cached codes the bucket
   *  can be recomputed from the node alone; without, a _Hash_code_base
   *  must be carried along.
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __cache_hash_code>
    struct _Local_iterator_base;
1215
  /**
   *  Primary class template _Hash_code_base.
   *
   *  Encapsulates two policy issues that aren't quite orthogonal.
   *    (1) the difference between using a ranged hash function and using
   *    the combination of a hash function and a range-hashing function.
   *    In the former case we don't have such things as hash codes, so
   *    we have a dummy type as placeholder.
   *    (2) Whether or not we cache hash codes.  Caching hash codes is
   *    meaningless if we have a ranged hash function.
   *
   *  We also put the key extraction objects here, for convenience.
   *  Each specialization derives from one or more of the template
   *  parameters to benefit from Ebo. This is important as this type
   *  is inherited in some cases by the _Local_iterator_base type used
   *  to implement local_iterator and const_local_iterator. As with
   *  any iterator type we prefer to make it as small as possible.
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __cache_hash_code>
    struct _Hash_code_base
    : private _Hashtable_ebo_helper<1, _Hash>
    {
    private:
      using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;

      // Gives the local iterator implementation access to _M_bucket_index().
      friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
					 _Hash, _RangeHash, _Unused, false>;

    public:
      typedef _Hash hasher;

      // Returns a copy of the stored hash functor.
      hasher
      hash_function() const
      { return _M_hash(); }

    protected:
      typedef std::size_t __hash_code;

      // We need the default constructor for the local iterators and _Hashtable
      // default constructor.
      _Hash_code_base() = default;

      _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }

      // Hash a key with the stored hash functor.
      __hash_code
      _M_hash_code(const _Key& __k) const
      {
	static_assert(__is_invocable<const _Hash&, const _Key&>{},
	    "hash function must be invocable with an argument of key type");
	return _M_hash()(__k);
      }

      // Transparent ("heterogeneous") variant: hash a key-like value of
      // another type _Kt directly.
      template<typename _Kt>
	__hash_code
	_M_hash_code_tr(const _Kt& __k) const
	{
	  static_assert(__is_invocable<const _Hash&, const _Kt&>{},
	    "hash function must be invocable with an argument of key type");
	  return _M_hash()(__k);
	}

      // Node caches its code and the supplied functor is our own _Hash:
      // the cached value is valid, reuse it.
      __hash_code
      _M_hash_code(const _Hash&,
		   const _Hash_node_value<_Value, true>& __n) const
      { return __n._M_hash_code; }

      // Compute hash code using _Hash as __n _M_hash_code, if present, was
      // computed using _H2.
      template<typename _H2>
	__hash_code
	_M_hash_code(const _H2&,
		     const _Hash_node_value<_Value, __cache_hash_code>& __n) const
	{ return _M_hash_code(_ExtractKey{}(__n._M_v())); }

      // No cached code in the node: recompute from the extracted key.
      __hash_code
      _M_hash_code(const _Hash_node_value<_Value, false>& __n) const
      { return _M_hash_code(_ExtractKey{}(__n._M_v())); }

      // Cached code in the node: return it without re-hashing.
      __hash_code
      _M_hash_code(const _Hash_node_value<_Value, true>& __n) const
      { return __n._M_hash_code; }

      // Map a hash code to a bucket via the range-hashing policy.
      std::size_t
      _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
      { return _RangeHash{}(__c, __bkt_count); }

      // Bucket of a node without a cached code: hash then range-hash.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
		      std::size_t __bkt_count) const
	noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
		  && noexcept(declval<const _RangeHash&>()((__hash_code)0,
							   (std::size_t)0)) )
      {
	return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
			    __bkt_count);
      }

      // Bucket of a node with a cached code: only range-hash is needed,
      // hence the weaker noexcept condition.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
		      std::size_t __bkt_count) const
	noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
							(std::size_t)0)) )
      { return _RangeHash{}(__n._M_hash_code, __bkt_count); }

      // Store/copy a code into a node: no-ops when nodes don't cache.
      void
      _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
      { }

      void
      _M_copy_code(_Hash_node_code_cache<false>&,
		   const _Hash_node_code_cache<false>&) const
      { }

      void
      _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
      { __n._M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node_code_cache<true>& __to,
		   const _Hash_node_code_cache<true>& __from) const
      { __to._M_hash_code = __from._M_hash_code; }

      // Swap the stored hash functors (used by _Hashtable::swap).
      void
      _M_swap(_Hash_code_base& __x)
      { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }

      const _Hash&
      _M_hash() const { return __ebo_hash::_M_cget(); }
    };
1348
  /// Partial specialization used when nodes contain a cached hash code.
  /// The bucket of any node can be recomputed from its cached code, so no
  /// _Hash_code_base needs to be stored in the iterator.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    struct _Local_iterator_base<_Key, _Value, _ExtractKey,
				_Hash, _RangeHash, _Unused, true>
    : public _Node_iterator_base<_Value, true>
    {
    protected:
      using __base_node_iter = _Node_iterator_base<_Value, true>;
      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, true>;

      _Local_iterator_base() = default;
      // The __hash_code_base argument is unused here; it exists so both
      // specializations share a constructor signature.
      _Local_iterator_base(const __hash_code_base&,
			   _Hash_node<_Value, true>* __p,
			   std::size_t __bkt, std::size_t __bkt_count)
      : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
      { }

      // Advance to the next node; become past-the-end (null) as soon as
      // the next node's bucket differs from ours.
      void
      _M_incr()
      {
	__base_node_iter::_M_incr();
	if (this->_M_cur)
	  {
	    std::size_t __bkt
	      = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
	    if (__bkt != _M_bucket)
	      this->_M_cur = nullptr;
	  }
      }

      std::size_t _M_bucket;
      std::size_t _M_bucket_count;

    public:
      std::size_t
      _M_get_bucket() const { return _M_bucket; }  // for debug mode
    };
1388
  // Uninitialized storage for a _Hash_code_base.
  // This type is DefaultConstructible and Assignable even if the
  // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
  // can be DefaultConstructible and Assignable.
  template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
    struct _Hash_code_storage
    {
      // Raw, suitably-aligned bytes; the owner placement-constructs and
      // explicitly destroys the _Tp inside.
      __gnu_cxx::__aligned_buffer<_Tp> _M_storage;

      _Tp*
      _M_h() { return _M_storage._M_ptr(); }

      const _Tp*
      _M_h() const { return _M_storage._M_ptr(); }
    };
1404
  // Empty partial specialization for empty _Hash_code_base types.
  template<typename _Tp>
    struct _Hash_code_storage<_Tp, true>
    {
      static_assert( std::is_empty<_Tp>::value, "Type must be empty" );

      // As _Tp is an empty type there will be no bytes written/read through
      // the cast pointer, so no strict-aliasing violation.
      _Tp*
      _M_h() { return reinterpret_cast<_Tp*>(this); }

      const _Tp*
      _M_h() const { return reinterpret_cast<const _Tp*>(this); }
    };
1419
  // Storage for the _Hash_code_base carried by non-caching local iterators.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    using __hash_code_for_local_iter
      = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
					   _Hash, _RangeHash, _Unused, false>>;
1425
1426 // Partial specialization used when hash codes are not cached
1427 template<typename _Key, typename _Value, typename _ExtractKey,
1428 typename _Hash, typename _RangeHash, typename _Unused>
1429 struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1430 _Hash, _RangeHash, _Unused, false>
1431 : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1432 _Unused>
1433 , _Node_iterator_base<_Value, false>
1434 {
1435 protected:
1436 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1437 _Hash, _RangeHash, _Unused, false>;
1438 using __node_iter_base = _Node_iterator_base<_Value, false>;
1439
1440 _Local_iterator_base() : _M_bucket_count(-1) { }
1441
1442 _Local_iterator_base(const __hash_code_base& __base,
1443 _Hash_node<_Value, false>* __p,
1444 std::size_t __bkt, std::size_t __bkt_count)
1445 : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1446 { _M_init(__base); }
1447
1448 ~_Local_iterator_base()
1449 {
1450 if (_M_bucket_count != size_t(-1))
1451 _M_destroy();
1452 }
1453
1454 _Local_iterator_base(const _Local_iterator_base& __iter)
1455 : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
1456 , _M_bucket_count(__iter._M_bucket_count)
1457 {
1458 if (_M_bucket_count != size_t(-1))
1459 _M_init(*__iter._M_h());
1460 }
1461
1462 _Local_iterator_base&
1463 operator=(const _Local_iterator_base& __iter)
1464 {
1465 if (_M_bucket_count != -1)
1466 _M_destroy();
1467 this->_M_cur = __iter._M_cur;
1468 _M_bucket = __iter._M_bucket;
1469 _M_bucket_count = __iter._M_bucket_count;
1470 if (_M_bucket_count != -1)
1471 _M_init(*__iter._M_h());
1472 return *this;
1473 }
1474
1475 void
1476 _M_incr()
1477 {
1478 __node_iter_base::_M_incr();
1479 if (this->_M_cur)
1480 {
1481 std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
1482 _M_bucket_count);
1483 if (__bkt != _M_bucket)
1484 this->_M_cur = nullptr;
1485 }
1486 }
1487
1488 std::size_t _M_bucket;
1489 std::size_t _M_bucket_count;
1490
1491 void
1492 _M_init(const __hash_code_base& __base)
1493 { ::new(this->_M_h()) __hash_code_base(__base); }
1494
1495 void
1496 _M_destroy() { this->_M_h()->~__hash_code_base(); }
1497
1498 public:
1499 std::size_t
1500 _M_get_bucket() const { return _M_bucket; } // for debug mode
1501 };
1502
  /// local iterators: forward iterators over the nodes of one bucket.
  /// Constness of pointer/reference follows the container's
  /// __constant_iterators trait.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      using value_type = _Value;
      using pointer = __conditional_t<__constant_iterators,
				      const value_type*, value_type*>;
      using reference = __conditional_t<__constant_iterators,
					const value_type&, value_type&>;
      using difference_type = ptrdiff_t;
      using iterator_category = forward_iterator_tag;

      _Local_iterator() = default;

      _Local_iterator(const __hash_code_base& __base,
		      _Hash_node<_Value, __cache>* __n,
		      std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      // Pre-increment: stops at the end of the bucket (see _M_incr).
      _Local_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_iterator
      operator++(int)
      {
	_Local_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
1556
  /// local const_iterators: like _Local_iterator but always yielding
  /// const references, and convertible from the mutable local iterator.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_const_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      typedef _Value value_type;
      typedef const value_type* pointer;
      typedef const value_type& reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Local_const_iterator() = default;

      _Local_const_iterator(const __hash_code_base& __base,
			    _Hash_node<_Value, __cache>* __n,
			    std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      // Implicit conversion from the non-const local iterator.
      _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
						  _Hash, _RangeHash, _Unused,
						  __constant_iterators,
						  __cache>& __x)
      : __base_type(__x)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_const_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_const_iterator
      operator++(int)
      {
	_Local_const_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
1615
  /**
   *  Primary class template _Hashtable_base.
   *
   *  Helper class adding management of _Equal functor to
   *  _Hash_code_base type.
   *
   *  Base class templates are:
   *    - __detail::_Hash_code_base
   *    - __detail::_Hashtable_ebo_helper
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Equal, typename _Hash, typename _RangeHash,
	   typename _Unused, typename _Traits>
    struct _Hashtable_base
    : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
			     _Unused, _Traits::__hash_cached::value>,
      private _Hashtable_ebo_helper<0, _Equal>
    {
    public:
      typedef _Key key_type;
      typedef _Value value_type;
      typedef _Equal key_equal;
      typedef std::size_t size_type;
      typedef std::ptrdiff_t difference_type;

      using __traits_type = _Traits;
      using __hash_cached = typename __traits_type::__hash_cached;

      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused,
					       __hash_cached::value>;

      using __hash_code = typename __hash_code_base::__hash_code;

    private:
      using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;

      // Cheap pre-filter on hash codes before calling the key-equality
      // functor.  When codes are not cached there is nothing to compare,
      // so the filter trivially passes.
      static bool
      _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
      { return true; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<false>&,
		     const _Hash_node_code_cache<false>&)
      { return true; }

      // With cached codes, unequal codes prove unequal keys.
      static bool
      _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
      { return __c == __n._M_hash_code; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
		     const _Hash_node_code_cache<true>& __rhn)
      { return __lhn._M_hash_code == __rhn._M_hash_code; }

    protected:
      _Hashtable_base() = default;

      _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
      : __hash_code_base(__hash), _EqualEBO(__eq)
      { }

      // Compare __k to the key stored in node __n with the _Equal functor.
      bool
      _M_key_equals(const _Key& __k,
		    const _Hash_node_value<_Value,
					   __hash_cached::value>& __n) const
      {
	static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
	  "key equality predicate must be invocable with two arguments of "
	  "key type");
	return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
      }

      // Transparent ("heterogeneous") comparison against a key-like _Kt.
      template<typename _Kt>
	bool
	_M_key_equals_tr(const _Kt& __k,
			 const _Hash_node_value<_Value,
						__hash_cached::value>& __n) const
	{
	  static_assert(
	    __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
	    "key equality predicate must be invocable with two arguments of "
	    "key type");
	  return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
	}

      // Full equality test: hash-code pre-filter, then key comparison.
      bool
      _M_equals(const _Key& __k, __hash_code __c,
		const _Hash_node_value<_Value, __hash_cached::value>& __n) const
      { return _S_equals(__c, __n) && _M_key_equals(__k, __n); }

      template<typename _Kt>
	bool
	_M_equals_tr(const _Kt& __k, __hash_code __c,
		     const _Hash_node_value<_Value,
					    __hash_cached::value>& __n) const
	{ return _S_equals(__c, __n) && _M_key_equals_tr(__k, __n); }

      // Node-to-node equality (used by container operator==).
      bool
      _M_node_equals(
	const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
	const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
      {
	return _S_node_equals(__lhn, __rhn)
	  && _M_key_equals(_ExtractKey{}(__lhn._M_v()), __rhn);
      }

      // Swap both the hash functor (via the base) and the _Equal functor.
      void
      _M_swap(_Hashtable_base& __x)
      {
	__hash_code_base::_M_swap(__x);
	std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
      }

      const _Equal&
      _M_eq() const { return _EqualEBO::_M_cget(); }
    };
1733
  /**
   *  Primary class template  _Equality.
   *
   *  This is for implementing equality comparison for unordered
   *  containers, per N3068, by John Lakos and Pablo Halpern.
   *  Algorithmically, we follow closely the reference implementations
   *  therein.
   *
   *  Specialized on _Traits::__unique_keys: unique-key containers admit a
   *  simpler algorithm than multi-key ones.
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Equality;
1748
  /// unordered_map and unordered_set specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // True iff *this and __other hold equal elements (definition below).
      bool
      _M_equal(const __hashtable&) const;
    };
1764
  // Unique-keys equality: for every element of *this, look up its bucket
  // in __other (the two tables may hash differently, so buckets are
  // recomputed) and scan that bucket for an equal element.  Equal sizes
  // plus per-element membership imply equal containers.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
	{
	  // Bucket of *__itx in __other; empty bucket => no match.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __prev_n = __other._M_buckets[__ybkt];
	  if (!__prev_n)
	    return false;

	  // Walk the bucket; stop on a value match, fail when the chain
	  // leaves bucket __ybkt (buckets share one singly-linked list).
	  for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
	       __n = __n->_M_next())
	    {
	      if (__n->_M_v() == *__itx)
		break;

	      if (!__n->_M_nxt
		  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
		return false;
	    }
	}

      return true;
    }
1800
  /// unordered_multiset and unordered_multimap specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // True iff *this and __other hold permutation-equal elements
      // (definition below).
      bool
      _M_equal(const __hashtable&) const;
    };
1816
  // Multi-keys equality: elements with equivalent keys are adjacent within
  // each table, so compare one equal-key group at a time.  For each group
  // in *this, locate the corresponding group in __other and check that the
  // two ranges are permutations of each other.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
	{
	  // Extend [__itx, __itx_end) over the run of equivalent keys,
	  // counting its length in __x_count.
	  std::size_t __x_count = 1;
	  auto __itx_end = __itx;
	  for (++__itx_end; __itx_end != __this->end()
		 && __this->key_eq()(_ExtractKey{}(*__itx),
				     _ExtractKey{}(*__itx_end));
	       ++__itx_end)
	    ++__x_count;

	  // Bucket of the group's key in __other; empty bucket => no match.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __y_prev_n = __other._M_buckets[__ybkt];
	  if (!__y_prev_n)
	    return false;

	  // Find the start of the matching key group in __other's bucket,
	  // skipping over whole groups (nodes equal to a reference node)
	  // and failing once the chain leaves bucket __ybkt.
	  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
	  for (;;)
	    {
	      if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
				   _ExtractKey{}(*__itx)))
		break;

	      auto __y_ref_n = __y_n;
	      for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
		if (!__other._M_node_equals(*__y_ref_n, *__y_n))
		  break;

	      if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
		return false;
	    }

	  // Ensure at least __x_count elements remain from __ity onward.
	  typename __hashtable::const_iterator __ity(__y_n);
	  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
	    if (--__x_count == 0)
	      break;

	  if (__x_count != 0)
	    return false;

	  // Same length: check the groups are permutations of each other.
	  if (!std::is_permutation(__itx, __itx_end, __ity))
	    return false;

	  __itx = __itx_end;
	}
      return true;
    }
1877
  /**
   * This type deals with all allocation and keeps an allocator instance
   * through inheritance to benefit from EBO when possible.
   */
  template<typename _NodeAlloc>
    struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
    {
    private:
      using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;

      // Extracts _Val from a _Hash_node<_Val, ...> instantiation.
      template<typename>
	struct __get_value_type;
      template<typename _Val, bool _Cache_hash_code>
	struct __get_value_type<_Hash_node<_Val, _Cache_hash_code>>
	{ using type = _Val; };

    public:
      using __node_type = typename _NodeAlloc::value_type;
      using __node_alloc_type = _NodeAlloc;
      // Use __gnu_cxx to benefit from _S_always_equal and al.
      using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;

      using __value_alloc_traits = typename __node_alloc_traits::template
	rebind_traits<typename __get_value_type<__node_type>::type>;

      using __node_ptr = __node_type*;
      using __node_base = _Hash_node_base;
      using __node_base_ptr = __node_base*;
      // Buckets are arrays of pointers to node bases, allocated through a
      // rebound copy of the node allocator.
      using __buckets_alloc_type =
	__alloc_rebind<__node_alloc_type, __node_base_ptr>;
      using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
      using __buckets_ptr = __node_base_ptr*;

      _Hashtable_alloc() = default;
      _Hashtable_alloc(const _Hashtable_alloc&) = default;
      _Hashtable_alloc(_Hashtable_alloc&&) = default;

      template<typename _Alloc>
	_Hashtable_alloc(_Alloc&& __a)
	: __ebo_node_alloc(std::forward<_Alloc>(__a))
	{ }

      __node_alloc_type&
      _M_node_allocator()
      { return __ebo_node_alloc::_M_get(); }

      const __node_alloc_type&
      _M_node_allocator() const
      { return __ebo_node_alloc::_M_cget(); }

      // Allocate a node and construct an element within it.
      template<typename... _Args>
	__node_ptr
	_M_allocate_node(_Args&&... __args);

      // Destroy the element within a node and deallocate the node.
      void
      _M_deallocate_node(__node_ptr __n);

      // Deallocate a node.
      void
      _M_deallocate_node_ptr(__node_ptr __n);

      // Deallocate the linked list of nodes pointed to by __n.
      // The elements within the nodes are destroyed.
      void
      _M_deallocate_nodes(__node_ptr __n);

      __buckets_ptr
      _M_allocate_buckets(std::size_t __bkt_count);

      void
      _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
    };
1952
  // Definitions of class template _Hashtable_alloc's out-of-line member
  // functions.

  // Allocates one node and constructs the element inside it from __args.
  // If element construction throws, the node storage is released before
  // rethrowing, so no memory leaks.
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
	auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
	// Convert the (possibly fancy) allocator pointer to a raw pointer.
	__node_ptr __n = std::__to_address(__nptr);
	__try
	  {
	    // Default-construct the node header, then construct the value
	    // through the allocator so allocator-aware construction applies.
	    ::new ((void*)__n) __node_type;
	    __node_alloc_traits::construct(_M_node_allocator(),
					   __n->_M_valptr(),
					   std::forward<_Args>(__args)...);
	    return __n;
	  }
	__catch(...)
	  {
	    __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
	    __throw_exception_again;
	  }
      }
1977
  // Destroys the element through the allocator, then releases the node.
  // The order matters: the value must be destroyed before the node memory
  // is returned to the allocator.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
1985
  // Destroys the node header (not the element) and hands the storage back
  // to the allocator, converting the raw pointer back to the allocator's
  // pointer type via pointer_traits.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
    {
      typedef typename __node_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
      __n->~__node_type();
      __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
    }
1995
1996 template<typename _NodeAlloc>
1997 void
1998 _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
1999 {
2000 while (__n)
2001 {
2002 __node_ptr __tmp = __n;
2003 __n = __n->_M_next();
2004 _M_deallocate_node(__tmp);
2005 }
2006 }
2007
  // Allocates an array of __bkt_count bucket pointers, zero-initialized
  // (a null entry means an empty bucket).
  template<typename _NodeAlloc>
    auto
    _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
    -> __buckets_ptr
    {
      // Rebind the node allocator to allocate pointers, not nodes.
      __buckets_alloc_type __alloc(_M_node_allocator());

      auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
      __buckets_ptr __p = std::__to_address(__ptr);
      // Pointers are trivial, so memset-to-zero is a valid null-fill here.
      __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
      return __p;
    }
2020
  // Releases a bucket array previously obtained from _M_allocate_buckets;
  // __bkt_count must match the allocation size.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::
    _M_deallocate_buckets(__buckets_ptr __bkts,
			  std::size_t __bkt_count)
    {
      typedef typename __buckets_alloc_traits::pointer _Ptr;
      // Recover the allocator's pointer type from the raw pointer.
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
      __buckets_alloc_type __alloc(_M_node_allocator());
      __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
    }
2032
2033 ///@} hashtable-detail
2034} // namespace __detail
2035/// @endcond
2036_GLIBCXX_END_NAMESPACE_VERSION
2037} // namespace std
2038
2039#endif // _HASHTABLE_POLICY_H
constexpr complex< _Tp > operator*(const complex< _Tp > &__x, const complex< _Tp > &__y)
Return new complex value x times y.
Definition: complex:392
integral_constant< bool, true > true_type
The type used as a compile-time boolean with true value.
Definition: type_traits:82
integral_constant< bool, false > false_type
The type used as a compile-time boolean with false value.
Definition: type_traits:85
constexpr tuple< _Elements &&... > forward_as_tuple(_Elements &&... __args) noexcept
std::forward_as_tuple
Definition: tuple:1589
constexpr std::remove_reference< _Tp >::type && move(_Tp &&__t) noexcept
Convert a value to an rvalue.
Definition: move.h:104
constexpr piecewise_construct_t piecewise_construct
Tag for piecewise construction of std::pair objects.
Definition: stl_pair.h:83
void swap(any &__x, any &__y) noexcept
Exchange the states of two any objects.
Definition: any:429
constexpr _Tp && forward(typename std::remove_reference< _Tp >::type &__t) noexcept
Forward an lvalue.
Definition: move.h:77
constexpr iterator_traits< _Iter >::iterator_category __iterator_category(const _Iter &)
ISO C++ entities toplevel namespace is std.
constexpr iterator_traits< _InputIterator >::difference_type distance(_InputIterator __first, _InputIterator __last)
A generalization of pointer arithmetic.
__numeric_traits_integer< _Tp > __int_traits
Convenience alias for __numeric_traits<integer-type>.
constexpr _Iterator __base(_Iterator __it)
Primary class template, tuple.
Definition: tuple:610
is_empty
Definition: type_traits:782
is_constructible
Definition: type_traits:979
Define a member typedef type only if a boolean constant is true.
Definition: type_traits:2229
Uniform interface to all allocator types.
Traits class for iterators.
Uniform interface to all pointer-like types.
Definition: ptr_traits.h:195
Struct holding two objects of arbitrary type.
Definition: stl_pair.h:187
Marking input iterators.
Forward iterators support a superset of input iterator operations.
Uniform interface to C++98 and C++11 allocators.