]> git.ipfire.org Git - thirdparty/gcc.git/blame - libstdc++-v3/include/tr1/hashtable_policy.h
PR libstdc++/19664 round 3
[thirdparty/gcc.git] / libstdc++-v3 / include / tr1 / hashtable_policy.h
CommitLineData
b82f782b
BK
1// Internal policy header for TR1 unordered_set and unordered_map -*- C++ -*-
2
3// Copyright (C) 2005, 2006 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 2, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// You should have received a copy of the GNU General Public License along
17// with this library; see the file COPYING. If not, write to the Free
18// Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
19// USA.
20
21// As a special exception, you may use this file as part of a free software
22// library without restriction. Specifically, if other files instantiate
23// templates or use macros or inline functions from this file, or you compile
24// this file and link it with other files to produce an executable, this
25// file does not by itself cause the resulting executable to be covered by
26// the GNU General Public License. This exception does not however
27// invalidate any other reasons why the executable file might be covered by
28// the GNU General Public License.
29
30/** @file
31 * This is a TR1 C++ Library header.
32 */
33
34#ifndef _TR1_HASHTABLE_POLICY_H
35#define _TR1_HASHTABLE_POLICY_H 1
36
37namespace std
38{
39_GLIBCXX_BEGIN_NAMESPACE(tr1)
40namespace detail
41{
42namespace
43{
44 // General utilities.
45 template<bool Flag, typename IfTrue, typename IfFalse>
46 struct IF;
47
48 template<typename IfTrue, typename IfFalse>
49 struct IF<true, IfTrue, IfFalse>
50 { typedef IfTrue type; };
51
52 template <typename IfTrue, typename IfFalse>
53 struct IF<false, IfTrue, IfFalse>
54 { typedef IfFalse type; };
55
56 // Helper function: return distance(first, last) for forward
57 // iterators, or 0 for input iterators.
58 template<class Iterator>
59 inline typename std::iterator_traits<Iterator>::difference_type
60 distance_fw(Iterator first, Iterator last, std::input_iterator_tag)
61 { return 0; }
62
63 template<class Iterator>
64 inline typename std::iterator_traits<Iterator>::difference_type
65 distance_fw(Iterator first, Iterator last, std::forward_iterator_tag)
66 { return std::distance(first, last); }
67
68 template<class Iterator>
69 inline typename std::iterator_traits<Iterator>::difference_type
70 distance_fw(Iterator first, Iterator last)
71 {
72 typedef typename std::iterator_traits<Iterator>::iterator_category tag;
73 return distance_fw(first, last, tag());
74 }
75
76 // XXX This is a hack. prime_rehash_policy's member functions, and
77 // certainly the list of primes, should be defined in a .cc file.
78 // We're temporarily putting them in a header because we don't have a
79 // place to put TR1 .cc files yet. There's no good reason for any of
80 // prime_rehash_policy's member functions to be inline, and there's
81 // certainly no good reason for X<> to exist at all.
82 struct lt
83 {
84 template<typename X, typename Y>
85 bool
86 operator()(X x, Y y)
87 { return x < y; }
88 };
89
  // Table of primes used as candidate bucket counts.  On targets
  // where sizeof(unsigned long) == 8 the table is extended with 48
  // primes above 2^32; on 32-bit targets those extra slots are never
  // consulted (n_primes stops before them).  X<> is a class template
  // only so the table can be defined in this header without
  // violating the one-definition rule (see the XXX comment above).
  template<int ulongsize = sizeof(unsigned long)>
    struct X
    {
      // Number of usable entries; the array holds one extra sentinel.
      static const int n_primes = ulongsize != 8 ? 256 : 256 + 48;
      static const unsigned long primes[256 + 48 + 1];
    };

  template<int ulongsize>
    const int X<ulongsize>::n_primes;

  template<int ulongsize>
    const unsigned long X<ulongsize>::primes[256 + 48 + 1] =
    {
      2ul, 3ul, 5ul, 7ul, 11ul, 13ul, 17ul, 19ul, 23ul, 29ul, 31ul,
      37ul, 41ul, 43ul, 47ul, 53ul, 59ul, 61ul, 67ul, 71ul, 73ul, 79ul,
      83ul, 89ul, 97ul, 103ul, 109ul, 113ul, 127ul, 137ul, 139ul, 149ul,
      157ul, 167ul, 179ul, 193ul, 199ul, 211ul, 227ul, 241ul, 257ul,
      277ul, 293ul, 313ul, 337ul, 359ul, 383ul, 409ul, 439ul, 467ul,
      503ul, 541ul, 577ul, 619ul, 661ul, 709ul, 761ul, 823ul, 887ul,
      953ul, 1031ul, 1109ul, 1193ul, 1289ul, 1381ul, 1493ul, 1613ul,
      1741ul, 1879ul, 2029ul, 2179ul, 2357ul, 2549ul, 2753ul, 2971ul,
      3209ul, 3469ul, 3739ul, 4027ul, 4349ul, 4703ul, 5087ul, 5503ul,
      5953ul, 6427ul, 6949ul, 7517ul, 8123ul, 8783ul, 9497ul, 10273ul,
      11113ul, 12011ul, 12983ul, 14033ul, 15173ul, 16411ul, 17749ul,
      19183ul, 20753ul, 22447ul, 24281ul, 26267ul, 28411ul, 30727ul,
      33223ul, 35933ul, 38873ul, 42043ul, 45481ul, 49201ul, 53201ul,
      57557ul, 62233ul, 67307ul, 72817ul, 78779ul, 85229ul, 92203ul,
      99733ul, 107897ul, 116731ul, 126271ul, 136607ul, 147793ul,
      159871ul, 172933ul, 187091ul, 202409ul, 218971ul, 236897ul,
      256279ul, 277261ul, 299951ul, 324503ul, 351061ul, 379787ul,
      410857ul, 444487ul, 480881ul, 520241ul, 562841ul, 608903ul,
      658753ul, 712697ul, 771049ul, 834181ul, 902483ul, 976369ul,
      1056323ul, 1142821ul, 1236397ul, 1337629ul, 1447153ul, 1565659ul,
      1693859ul, 1832561ul, 1982627ul, 2144977ul, 2320627ul, 2510653ul,
      2716249ul, 2938679ul, 3179303ul, 3439651ul, 3721303ul, 4026031ul,
      4355707ul, 4712381ul, 5098259ul, 5515729ul, 5967347ul, 6456007ul,
      6984629ul, 7556579ul, 8175383ul, 8844859ul, 9569143ul, 10352717ul,
      11200489ul, 12117689ul, 13109983ul, 14183539ul, 15345007ul,
      16601593ul, 17961079ul, 19431899ul, 21023161ul, 22744717ul,
      24607243ul, 26622317ul, 28802401ul, 31160981ul, 33712729ul,
      36473443ul, 39460231ul, 42691603ul, 46187573ul, 49969847ul,
      54061849ul, 58488943ul, 63278561ul, 68460391ul, 74066549ul,
      80131819ul, 86693767ul, 93793069ul, 101473717ul, 109783337ul,
      118773397ul, 128499677ul, 139022417ul, 150406843ul, 162723577ul,
      176048909ul, 190465427ul, 206062531ul, 222936881ul, 241193053ul,
      260944219ul, 282312799ul, 305431229ul, 330442829ul, 357502601ul,
      386778277ul, 418451333ul, 452718089ul, 489790921ul, 529899637ul,
      573292817ul, 620239453ul, 671030513ul, 725980837ul, 785430967ul,
      849749479ul, 919334987ul, 994618837ul, 1076067617ul, 1164186217ul,
      1259520799ul, 1362662261ul, 1474249943ul, 1594975441ul,
      1725587117ul, 1866894511ul, 2019773507ul, 2185171673ul,
      2364114217ul, 2557710269ul, 2767159799ul, 2993761039ul,
      3238918481ul, 3504151727ul, 3791104843ul, 4101556399ul,
      4294967291ul,
      // Sentinel, so we don't have to test the result of lower_bound,
      // or, on 64-bit machines, rest of the table.  On 32-bit targets
      // the ull values below would not fit in unsigned long, so the
      // first slot repeats the largest 32-bit prime instead.
      ulongsize != 8 ? 4294967291ul : (unsigned long)6442450933ull,
      (unsigned long)8589934583ull,
      (unsigned long)12884901857ull, (unsigned long)17179869143ull,
      (unsigned long)25769803693ull, (unsigned long)34359738337ull,
      (unsigned long)51539607367ull, (unsigned long)68719476731ull,
      (unsigned long)103079215087ull, (unsigned long)137438953447ull,
      (unsigned long)206158430123ull, (unsigned long)274877906899ull,
      (unsigned long)412316860387ull, (unsigned long)549755813881ull,
      (unsigned long)824633720731ull, (unsigned long)1099511627689ull,
      (unsigned long)1649267441579ull, (unsigned long)2199023255531ull,
      (unsigned long)3298534883309ull, (unsigned long)4398046511093ull,
      (unsigned long)6597069766607ull, (unsigned long)8796093022151ull,
      (unsigned long)13194139533241ull, (unsigned long)17592186044399ull,
      (unsigned long)26388279066581ull, (unsigned long)35184372088777ull,
      (unsigned long)52776558133177ull, (unsigned long)70368744177643ull,
      (unsigned long)105553116266399ull, (unsigned long)140737488355213ull,
      (unsigned long)211106232532861ull, (unsigned long)281474976710597ull,
      (unsigned long)562949953421231ull, (unsigned long)1125899906842597ull,
      (unsigned long)2251799813685119ull, (unsigned long)4503599627370449ull,
      (unsigned long)9007199254740881ull, (unsigned long)18014398509481951ull,
      (unsigned long)36028797018963913ull, (unsigned long)72057594037927931ull,
      (unsigned long)144115188075855859ull,
      (unsigned long)288230376151711717ull,
      (unsigned long)576460752303423433ull,
      (unsigned long)1152921504606846883ull,
      (unsigned long)2305843009213693951ull,
      (unsigned long)4611686018427387847ull,
      (unsigned long)9223372036854775783ull,
      (unsigned long)18446744073709551557ull,
      // Final sentinel (repeated) so lower_bound never runs off the end.
      (unsigned long)18446744073709551557ull
    };
177} // anonymous namespace
178
179 // Auxiliary types used for all instantiations of hashtable: nodes
180 // and iterators.
181
182 // Nodes, used to wrap elements stored in the hash table. A policy
183 // template parameter of class template hashtable controls whether
184 // nodes also store a hash code. In some cases (e.g. strings) this
185 // may be a performance win.
  template<typename Value, bool cache_hash_code>
    struct hash_node;

  // Node that additionally caches the element's hash code; the cache
  // is written via hash_code_base::store_code/copy_code below.
  template<typename Value>
    struct hash_node<Value, true>
    {
      Value m_v;               // the stored element
      std::size_t hash_code;   // cached hash code of m_v
      hash_node* m_next;       // next node in this bucket's singly linked list
    };

  // Node without a cached hash code.
  template<typename Value>
    struct hash_node<Value, false>
    {
      Value m_v;
      hash_node* m_next;
    };
203
204 // Local iterators, used to iterate within a bucket but not between
205 // buckets.
  // Common state and traversal for the local (per-bucket) iterators:
  // just a pointer to the current node.
  template<typename Value, bool cache>
    struct node_iterator_base
    {
      node_iterator_base(hash_node<Value, cache>* p)
      : m_cur(p) { }

      // Step to the next node in the bucket's list.
      // Precondition: m_cur is non-null.
      void
      incr()
      { m_cur = m_cur->m_next; }

      hash_node<Value, cache>* m_cur;  // current node (0 when default-constructed)
    };
218
219 template<typename Value, bool cache>
220 inline bool
221 operator==(const node_iterator_base<Value, cache>& x,
222 const node_iterator_base<Value, cache>& y)
223 { return x.m_cur == y.m_cur; }
224
225 template<typename Value, bool cache>
226 inline bool
227 operator!=(const node_iterator_base<Value, cache>& x,
228 const node_iterator_base<Value, cache>& y)
229 { return x.m_cur != y.m_cur; }
230
  // Forward iterator over a single bucket.  When constant_iterators
  // is true, pointer and reference are const-qualified even for this
  // "mutable" iterator, so elements cannot be modified through it.
  template<typename Value, bool constant_iterators, bool cache>
    struct node_iterator
    : public node_iterator_base<Value, cache>
    {
      typedef Value value_type;
      typedef typename IF<constant_iterators, const Value*, Value*>::type
      pointer;
      typedef typename IF<constant_iterators, const Value&, Value&>::type
      reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      // Singular iterator.
      node_iterator()
      : node_iterator_base<Value, cache>(0) { }

      explicit
      node_iterator(hash_node<Value, cache>* p)
      : node_iterator_base<Value, cache>(p) { }

      reference
      operator*() const
      { return this->m_cur->m_v; }

      pointer
      operator->() const
      { return &this->m_cur->m_v; }

      node_iterator&
      operator++()
      {
        this->incr();
        return *this;
      }

      node_iterator
      operator++(int)
      {
        node_iterator tmp(*this);
        this->incr();
        return tmp;
      }
    };
273
  // Const forward iterator over a single bucket; implicitly
  // convertible from the corresponding node_iterator.
  template<typename Value, bool constant_iterators, bool cache>
    struct node_const_iterator
    : public node_iterator_base<Value, cache>
    {
      typedef Value value_type;
      typedef const Value* pointer;
      typedef const Value& reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      // Singular iterator.
      node_const_iterator()
      : node_iterator_base<Value, cache>(0) { }

      explicit
      node_const_iterator(hash_node<Value, cache>* p)
      : node_iterator_base<Value, cache>(p) { }

      // iterator -> const_iterator conversion.
      node_const_iterator(const node_iterator<Value, constant_iterators,
			  cache>& x)
      : node_iterator_base<Value, cache>(x.m_cur) { }

      reference
      operator*() const
      { return this->m_cur->m_v; }

      pointer
      operator->() const
      { return &this->m_cur->m_v; }

      node_const_iterator&
      operator++()
      {
        this->incr();
        return *this;
      }

      node_const_iterator
      operator++(int)
      {
        node_const_iterator tmp(*this);
        this->incr();
        return tmp;
      }
    };
318
  // Common state for the global iterators: the current node plus the
  // bucket slot that contains it, so iteration can cross buckets.
  template<typename Value, bool cache>
    struct hashtable_iterator_base
    {
      hashtable_iterator_base(hash_node<Value, cache>* node,
                              hash_node<Value, cache>** bucket)
      : m_cur_node(node), m_cur_bucket(bucket) { }

      // Advance to the next element; when the current bucket's list
      // is exhausted, move on to the next non-empty bucket.
      void
      incr()
      {
        m_cur_node = m_cur_node->m_next;
        if (!m_cur_node)
          m_incr_bucket();
      }

      // Defined out of line below.
      void
      m_incr_bucket();

      hash_node<Value, cache>* m_cur_node;
      hash_node<Value, cache>** m_cur_bucket;
    };
340
341 // Global iterators, used for arbitrary iteration within a hash
342 // table. Larger and more expensive than local iterators.
343 template<typename Value, bool cache>
344 void
345 hashtable_iterator_base<Value, cache>::
346 m_incr_bucket()
347 {
348 ++m_cur_bucket;
349
350 // This loop requires the bucket array to have a non-null sentinel.
351 while (!*m_cur_bucket)
352 ++m_cur_bucket;
353 m_cur_node = *m_cur_bucket;
354 }
355
356 template<typename Value, bool cache>
357 inline bool
358 operator==(const hashtable_iterator_base<Value, cache>& x,
359 const hashtable_iterator_base<Value, cache>& y)
360 { return x.m_cur_node == y.m_cur_node; }
361
362 template<typename Value, bool cache>
363 inline bool
364 operator!=(const hashtable_iterator_base<Value, cache>& x,
365 const hashtable_iterator_base<Value, cache>& y)
366 { return x.m_cur_node != y.m_cur_node; }
367
  // Forward iterator over the whole table.  As with node_iterator,
  // constant_iterators const-qualifies pointer/reference.
  template<typename Value, bool constant_iterators, bool cache>
    struct hashtable_iterator
    : public hashtable_iterator_base<Value, cache>
    {
      typedef Value value_type;
      typedef typename IF<constant_iterators, const Value*, Value*>::type
      pointer;
      typedef typename IF<constant_iterators, const Value&, Value&>::type
      reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      // Singular iterator.
      hashtable_iterator()
      : hashtable_iterator_base<Value, cache>(0, 0) { }

      hashtable_iterator(hash_node<Value, cache>* p,
                         hash_node<Value, cache>** b)
      : hashtable_iterator_base<Value, cache>(p, b) { }

      // Start at the first node of bucket b.
      explicit
      hashtable_iterator(hash_node<Value, cache>** b)
      : hashtable_iterator_base<Value, cache>(*b, b) { }

      reference
      operator*() const
      { return this->m_cur_node->m_v; }

      pointer
      operator->() const
      { return &this->m_cur_node->m_v; }

      hashtable_iterator&
      operator++()
      {
        this->incr();
        return *this;
      }

      hashtable_iterator
      operator++(int)
      {
        hashtable_iterator tmp(*this);
        this->incr();
        return tmp;
      }
    };
414
  // Const forward iterator over the whole table; implicitly
  // convertible from hashtable_iterator.
  template<typename Value, bool constant_iterators, bool cache>
    struct hashtable_const_iterator
    : public hashtable_iterator_base<Value, cache>
    {
      typedef Value value_type;
      typedef const Value* pointer;
      typedef const Value& reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      // Singular iterator.
      hashtable_const_iterator()
      : hashtable_iterator_base<Value, cache>(0, 0) { }

      hashtable_const_iterator(hash_node<Value, cache>* p,
                               hash_node<Value, cache>** b)
      : hashtable_iterator_base<Value, cache>(p, b) { }

      // Start at the first node of bucket b.
      explicit
      hashtable_const_iterator(hash_node<Value, cache>** b)
      : hashtable_iterator_base<Value, cache>(*b, b) { }

      // iterator -> const_iterator conversion.
      hashtable_const_iterator(const hashtable_iterator<Value,
			       constant_iterators, cache>& x)
      : hashtable_iterator_base<Value, cache>(x.m_cur_node, x.m_cur_bucket) { }

      reference
      operator*() const
      { return this->m_cur_node->m_v; }

      pointer
      operator->() const
      { return &this->m_cur_node->m_v; }

      hashtable_const_iterator&
      operator++()
      {
        this->incr();
        return *this;
      }

      hashtable_const_iterator
      operator++(int)
      {
        hashtable_const_iterator tmp(*this);
        this->incr();
        return tmp;
      }
    };
463
464
465 // Many of class template hashtable's template parameters are policy
466 // classes. These are defaults for the policies.
467
468 // The two key extraction policies used by the *set and *map variants.
469 // XXX pb_ds::type_to_type
470 template<typename T>
471 struct identity
472 {
473 const T&
474 operator()(const T& t) const
475 { return t; }
476 };
477
478 // XXX use std::_Select1st?
479 template<typename Pair>
480 struct extract1st
481 {
482 const typename Pair::first_type&
483 operator()(const Pair& p) const
484 { return p.first; }
485 };
486
487 // Default range hashing function: use division to fold a large number
488 // into the range [0, N).
489 struct mod_range_hashing
490 {
491 typedef std::size_t first_argument_type;
492 typedef std::size_t second_argument_type;
493 typedef std::size_t result_type;
494
495 result_type
496 operator()(first_argument_type r, second_argument_type N) const
497 { return r % N; }
498 };
499
500 // Default ranged hash function H. In principle it should be a
501 // function object composed from objects of type H1 and H2 such that
502 // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
503 // h1 and h2. So instead we'll just use a tag to tell class template
504 // hashtable to do that composition.
505 struct default_ranged_hash { };
506
507 // Default value for rehash policy. Bucket size is (usually) the
508 // smallest prime that keeps the load factor small enough.
  struct prime_rehash_policy
  {
    // z is the maximum load factor: buckets grow once the element
    // count would exceed z * bucket count.
    prime_rehash_policy(float z = 1.0);

    float
    max_load_factor() const;

    // Return a bucket size no smaller than n.
    std::size_t
    next_bkt(std::size_t n) const;

    // Return a bucket count appropriate for n elements.
    std::size_t
    bkt_for_elements(std::size_t n) const;

    // n_bkt is current bucket count, n_elt is current element count,
    // and n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    need_rehash(std::size_t n_bkt, std::size_t n_elt, std::size_t n_ins) const;

    float m_max_load_factor;
    float m_growth_factor;
    // Cached element count at which the next rehash check fires;
    // mutable because the const lookup functions refresh the cache.
    mutable std::size_t m_next_resize;
  };
535
  // Growth factor 2 means the bucket count at least doubles whenever
  // need_rehash decides to grow (see need_rehash below).
  inline
  prime_rehash_policy::
  prime_rehash_policy(float z)
  : m_max_load_factor(z), m_growth_factor(2.f), m_next_resize(0)
  { }

  inline float
  prime_rehash_policy::
  max_load_factor() const
  { return m_max_load_factor; }
546
  // Return a prime no smaller than n.
  inline std::size_t
  prime_rehash_policy::
  next_bkt(std::size_t n) const
  {
    const unsigned long* const last = X<>::primes + X<>::n_primes;
    const unsigned long* p = std::lower_bound(X<>::primes, last, n);
    // Cache the element count at which this bucket count exceeds the
    // load factor, so need_rehash can skip the float multiply.
    m_next_resize = static_cast<std::size_t>(std::ceil(*p * m_max_load_factor));
    return *p;
  }
557
  // Return the smallest prime p such that alpha p >= n, where alpha
  // is the load factor.
  inline std::size_t
  prime_rehash_policy::
  bkt_for_elements(std::size_t n) const
  {
    const unsigned long* const last = X<>::primes + X<>::n_primes;
    const float min_bkts = n / m_max_load_factor;
    // lt() supplies the heterogeneous float/unsigned long comparison.
    const unsigned long* p = std::lower_bound(X<>::primes, last,
                                              min_bkts, lt());
    m_next_resize = static_cast<std::size_t>(std::ceil(*p * m_max_load_factor));
    return *p;
  }
571
  // Finds the smallest prime p such that alpha p > n_elt + n_ins.
  // If p > n_bkt, return make_pair(true, p); otherwise return
  // make_pair(false, 0).  In principle this isn't very different from
  // bkt_for_elements.

  // The only tricky part is that we're caching the element count at
  // which we need to rehash, so we don't have to do a floating-point
  // multiply for every insertion.

  inline std::pair<bool, std::size_t>
  prime_rehash_policy::
  need_rehash(std::size_t n_bkt, std::size_t n_elt, std::size_t n_ins) const
  {
    // Fast path: cached threshold not reached, no work to do.
    if (n_elt + n_ins > m_next_resize)
      {
        // Recompute the required bucket count from the load factor.
        float min_bkts = (float(n_ins) + float(n_elt)) / m_max_load_factor;
        if (min_bkts > n_bkt)
          {
            // Grow by at least m_growth_factor so a run of insertions
            // doesn't rehash on every step.
            min_bkts = std::max(min_bkts, m_growth_factor * n_bkt);
            const unsigned long* const last = X<>::primes + X<>::n_primes;
            const unsigned long* p = std::lower_bound(X<>::primes, last,
                                                      min_bkts, lt());
            m_next_resize =
              static_cast<std::size_t>(std::ceil(*p * m_max_load_factor));
            return std::make_pair(true, *p);
          }
        else
          {
            // Current bucket count still suffices; refresh the cached
            // threshold before declining.
            m_next_resize =
              static_cast<std::size_t>(std::ceil(n_bkt * m_max_load_factor));
            return std::make_pair(false, 0);
          }
      }
    else
      return std::make_pair(false, 0);
  }
608
609 // Base classes for std::tr1::hashtable. We define these base
610 // classes because in some cases we want to do different things
611 // depending on the value of a policy class. In some cases the
612 // policy class affects which member functions and nested typedefs
613 // are defined; we handle that by specializing base class templates.
614 // Several of the base class templates need to access other members
615 // of class template hashtable, so we use the "curiously recurring
616 // template pattern" for them.
617
618 // class template map_base. If the hashtable has a value type of the
619 // form pair<T1, T2> and a key extraction policy that returns the
620 // first part of the pair, the hashtable gets a mapped_type typedef.
621 // If it satisfies those criteria and also has unique keys, then it
622 // also gets an operator[].
  // Primary template: no mapped_type, no operator[].
  template<typename K, typename V, typename Ex, bool unique, typename Hashtable>
    struct map_base { };

  // Map with non-unique keys: gets mapped_type but no operator[].
  template<typename K, typename Pair, typename Hashtable>
    struct map_base<K, Pair, extract1st<Pair>, false, Hashtable>
    {
      typedef typename Pair::second_type mapped_type;
    };

  // Map with unique keys: also gets operator[], defined out of line
  // below.
  template<typename K, typename Pair, typename Hashtable>
    struct map_base<K, Pair, extract1st<Pair>, true, Hashtable>
    {
      typedef typename Pair::second_type mapped_type;

      mapped_type&
      operator[](const K& k);
    };
640
  template<typename K, typename Pair, typename Hashtable>
    typename map_base<K, Pair, extract1st<Pair>, true, Hashtable>::mapped_type&
    map_base<K, Pair, extract1st<Pair>, true, Hashtable>::
    operator[](const K& k)
    {
      // CRTP downcast: Hashtable derives from this map_base.
      Hashtable* h = static_cast<Hashtable*>(this);
      typename Hashtable::hash_code_t code = h->m_hash_code(k);
      std::size_t n = h->bucket_index(k, code, h->bucket_count());

      // Return the existing mapped value, or insert a
      // default-constructed one when the key is absent.
      typename Hashtable::node* p = h->m_find_node(h->m_buckets[n], k, code);
      if (!p)
        return h->m_insert_bucket(std::make_pair(k, mapped_type()),
                                  n, code)->second;
      return (p->m_v).second;
    }
656
657 // class template rehash_base. Give hashtable the max_load_factor
658 // functions iff the rehash policy is prime_rehash_policy.
659 template<typename RehashPolicy, typename Hashtable>
660 struct rehash_base { };
661
662 template<typename Hashtable>
663 struct rehash_base<prime_rehash_policy, Hashtable>
664 {
665 float
666 max_load_factor() const
667 {
668 const Hashtable* This = static_cast<const Hashtable*>(this);
669 return This->rehash_policy().max_load_factor();
670 }
671
672 void
673 max_load_factor(float z)
674 {
675 Hashtable* This = static_cast<Hashtable*>(this);
676 This->rehash_policy(prime_rehash_policy(z));
677 }
678 };
679
680 // Class template hash_code_base. Encapsulates two policy issues that
681 // aren't quite orthogonal.
682 // (1) the difference between using a ranged hash function and using
683 // the combination of a hash function and a range-hashing function.
684 // In the former case we don't have such things as hash codes, so
685 // we have a dummy type as placeholder.
686 // (2) Whether or not we cache hash codes. Caching hash codes is
687 // meaningless if we have a ranged hash function.
688 // We also put the key extraction and equality comparison function
689 // objects here, for convenience.
690
691 // Primary template: unused except as a hook for specializations.
  // H1: hash function, H2: range-hashing function, H: ranged hash
  // function (see commentary above); cache_hash_code selects whether
  // nodes store their hash.
  template<typename Key, typename Value,
           typename ExtractKey, typename Equal,
           typename H1, typename H2, typename H,
           bool cache_hash_code>
    struct hash_code_base;
697
  // Specialization: ranged hash function, no caching hash codes.  H1
  // and H2 are provided but ignored.  We define a dummy hash code type.
  template<typename Key, typename Value,
           typename ExtractKey, typename Equal,
           typename H1, typename H2, typename H>
    struct hash_code_base<Key, Value, ExtractKey, Equal, H1, H2, H, false>
    {
    protected:
      hash_code_base(const ExtractKey& ex, const Equal& eq,
                     const H1&, const H2&, const H& h)
      : m_extract(ex), m_eq(eq), m_ranged_hash(h) { }

      // There is no separate hash code with a ranged hash; use a
      // placeholder type and a null value.
      typedef void* hash_code_t;

      hash_code_t
      m_hash_code(const Key& k) const
      { return 0; }

      std::size_t
      bucket_index(const Key& k, hash_code_t, std::size_t N) const
      { return m_ranged_hash(k, N); }

      std::size_t
      bucket_index(const hash_node<Value, false>* p, std::size_t N) const
      { return m_ranged_hash(m_extract(p->m_v), N); }

      // No code to pre-compare; equality comes solely from m_eq.
      bool
      compare(const Key& k, hash_code_t, hash_node<Value, false>* n) const
      { return m_eq(k, m_extract(n->m_v)); }

      // Nothing to store or copy when codes are not cached.
      void
      store_code(hash_node<Value, false>*, hash_code_t) const
      { }

      void
      copy_code(hash_node<Value, false>*, const hash_node<Value, false>*) const
      { }

      void
      m_swap(hash_code_base& x)
      {
        std::swap(m_extract, x.m_extract);
        std::swap(m_eq, x.m_eq);
        std::swap(m_ranged_hash, x.m_ranged_hash);
      }

    protected:
      ExtractKey m_extract;
      Equal m_eq;
      H m_ranged_hash;
    };
749
750
751 // No specialization for ranged hash function while caching hash codes.
752 // That combination is meaningless, and trying to do it is an error.
753
754
755 // Specialization: ranged hash function, cache hash codes. This
756 // combination is meaningless, so we provide only a declaration
757 // and no definition.
  // Declared but deliberately never defined (see comment above).
  template<typename Key, typename Value,
           typename ExtractKey, typename Equal,
           typename H1, typename H2, typename H>
    struct hash_code_base<Key, Value, ExtractKey, Equal, H1, H2, H, true>;
762
763
764 // Specialization: hash function and range-hashing function, no
765 // caching of hash codes. H is provided but ignored. Provides
766 // typedef and accessor required by TR1.
  template<typename Key, typename Value,
           typename ExtractKey, typename Equal,
           typename H1, typename H2>
    struct hash_code_base<Key, Value, ExtractKey, Equal, H1, H2,
                          default_ranged_hash, false>
    {
      typedef H1 hasher;

      hasher
      hash_function() const
      { return m_h1; }

    protected:
      hash_code_base(const ExtractKey& ex, const Equal& eq,
                     const H1& h1, const H2& h2, const default_ranged_hash&)
      : m_extract(ex), m_eq(eq), m_h1(h1), m_h2(h2) { }

      // A real hash code: m_h1 applied to the key.
      typedef std::size_t hash_code_t;

      hash_code_t
      m_hash_code(const Key& k) const
      { return m_h1(k); }

      std::size_t
      bucket_index(const Key&, hash_code_t c, std::size_t N) const
      { return m_h2(c, N); }

      // No cached code in the node, so re-hash the stored element.
      std::size_t
      bucket_index(const hash_node<Value, false>* p, std::size_t N) const
      { return m_h2(m_h1(m_extract(p->m_v)), N); }

      bool
      compare(const Key& k, hash_code_t, hash_node<Value, false>* n) const
      { return m_eq(k, m_extract(n->m_v)); }

      // Nodes don't store codes in this configuration.
      void
      store_code(hash_node<Value, false>*, hash_code_t) const
      { }

      void
      copy_code(hash_node<Value, false>*, const hash_node<Value, false>*) const
      { }

      void
      m_swap(hash_code_base& x)
      {
        std::swap(m_extract, x.m_extract);
        std::swap(m_eq, x.m_eq);
        std::swap(m_h1, x.m_h1);
        std::swap(m_h2, x.m_h2);
      }

    protected:
      ExtractKey m_extract;
      Equal m_eq;
      H1 m_h1;
      H2 m_h2;
    };
825
826 // Specialization: hash function and range-hashing function,
827 // caching hash codes. H is provided but ignored. Provides
828 // typedef and accessor required by TR1.
  template<typename Key, typename Value,
           typename ExtractKey, typename Equal,
           typename H1, typename H2>
    struct hash_code_base<Key, Value, ExtractKey, Equal, H1, H2,
                          default_ranged_hash, true>
    {
      typedef H1 hasher;

      hasher
      hash_function() const
      { return m_h1; }

    protected:
      hash_code_base(const ExtractKey& ex, const Equal& eq,
                     const H1& h1, const H2& h2, const default_ranged_hash&)
      : m_extract(ex), m_eq(eq), m_h1(h1), m_h2(h2) { }

      typedef std::size_t hash_code_t;

      hash_code_t
      m_hash_code(const Key& k) const
      { return m_h1(k); }

      std::size_t
      bucket_index(const Key&, hash_code_t c, std::size_t N) const
      { return m_h2(c, N); }

      // Use the code cached in the node instead of re-hashing.
      std::size_t
      bucket_index(const hash_node<Value, true>* p, std::size_t N) const
      { return m_h2(p->hash_code, N); }

      // Compare cached codes first: unequal codes mean unequal keys,
      // so the (possibly expensive) m_eq call is skipped.
      bool
      compare(const Key& k, hash_code_t c, hash_node<Value, true>* n) const
      { return c == n->hash_code && m_eq(k, m_extract(n->m_v)); }

      void
      store_code(hash_node<Value, true>* n, hash_code_t c) const
      { n->hash_code = c; }

      void
      copy_code(hash_node<Value, true>* to,
                const hash_node<Value, true>* from) const
      { to->hash_code = from->hash_code; }

      void
      m_swap(hash_code_base& x)
      {
        std::swap(m_extract, x.m_extract);
        std::swap(m_eq, x.m_eq);
        std::swap(m_h1, x.m_h1);
        std::swap(m_h2, x.m_h2);
      }

    protected:
      ExtractKey m_extract;
      Equal m_eq;
      H1 m_h1;
      H2 m_h2;
    };
888} // namespace detail
889_GLIBCXX_END_NAMESPACE
890} // namespace std::tr1
891
892#endif // _TR1_HASHTABLE_POLICY_H
893