1 // Internal policy header for TR1 unordered_set and unordered_map -*- C++ -*-
2 
3 // Copyright (C) 2005, 2006 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library.  This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 2, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 // GNU General Public License for more details.
15 
16 // You should have received a copy of the GNU General Public License along
17 // with this library; see the file COPYING.  If not, write to the Free
18 // Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
19 // USA.
20 
21 // As a special exception, you may use this file as part of a free software
22 // library without restriction.  Specifically, if other files instantiate
23 // templates or use macros or inline functions from this file, or you compile
24 // this file and link it with other files to produce an executable, this
25 // file does not by itself cause the resulting executable to be covered by
26 // the GNU General Public License.  This exception does not however
27 // invalidate any other reasons why the executable file might be covered by
28 // the GNU General Public License.
29 
30 /** @file tr1/hashtable_policy.h
31  *  This is a TR1 C++ Library header.
32  */
33 
34 #ifndef _TR1_HASHTABLE_POLICY_H
35 #define _TR1_HASHTABLE_POLICY_H 1
36 
37 #include <functional> // _Identity, _Select1st
38 #include <tr1/utility>
39 #include <ext/type_traits.h>
40 
41 namespace std
42 {
43 _GLIBCXX_BEGIN_NAMESPACE(tr1)
44 namespace __detail
45 {
46   // Helper function: return distance(first, last) for forward
47   // iterators, or 0 for input iterators.
48   template<class _Iterator>
49     inline typename std::iterator_traits<_Iterator>::difference_type
50     __distance_fw(_Iterator __first, _Iterator __last,
51 		  std::input_iterator_tag)
52     { return 0; }
53 
54   template<class _Iterator>
55     inline typename std::iterator_traits<_Iterator>::difference_type
56     __distance_fw(_Iterator __first, _Iterator __last,
57 		  std::forward_iterator_tag)
58     { return std::distance(__first, __last); }
59 
60   template<class _Iterator>
61     inline typename std::iterator_traits<_Iterator>::difference_type
62     __distance_fw(_Iterator __first, _Iterator __last)
63     {
64       typedef typename std::iterator_traits<_Iterator>::iterator_category _Tag;
65       return __distance_fw(__first, __last, _Tag());
66     }
67 
68   // XXX This is a hack.  _Prime_rehash_policy's member functions, and
69   // certainly the list of primes, should be defined in a .cc file.
70   // We're temporarily putting them in a header because we don't have a
71   // place to put TR1 .cc files yet.  There's no good reason for any of
72   // _Prime_rehash_policy's member functions to be inline, and there's
73   // certainly no good reason for _Primes<> to exist at all.
74   struct _LessThan
75   {
76     template<typename _Tp, typename _Up>
77       bool
78       operator()(_Tp __x, _Up __y)
79       { return __x < __y; }
80   };
81 
  // Table of primes used by _Prime_rehash_policy to choose bucket
  // counts.  The class is a template (parametrized only on
  // sizeof(unsigned long)) so that its static data members can be
  // defined here in the header without violating the one-definition
  // rule; see the XXX comment above.
  template<int __ulongsize = sizeof(unsigned long)>
    struct _Primes
    {
      // Number of usable entries: 256 when unsigned long is 32-bit,
      // 256 + 48 when it is 64-bit (the extra entries exceed 2^32).
      static const int __n_primes = __ulongsize != 8 ? 256 : 256 + 48;
      static const unsigned long __primes[256 + 48 + 1];
    };

  template<int __ulongsize>
    const int _Primes<__ulongsize>::__n_primes;

  // Increasing sequence of primes, searched with std::lower_bound to
  // find the smallest entry no less than a requested bucket count.
  template<int __ulongsize>
    const unsigned long _Primes<__ulongsize>::__primes[256 + 48 + 1] =
    {
      2ul, 3ul, 5ul, 7ul, 11ul, 13ul, 17ul, 19ul, 23ul, 29ul, 31ul,
      37ul, 41ul, 43ul, 47ul, 53ul, 59ul, 61ul, 67ul, 71ul, 73ul, 79ul,
      83ul, 89ul, 97ul, 103ul, 109ul, 113ul, 127ul, 137ul, 139ul, 149ul,
      157ul, 167ul, 179ul, 193ul, 199ul, 211ul, 227ul, 241ul, 257ul,
      277ul, 293ul, 313ul, 337ul, 359ul, 383ul, 409ul, 439ul, 467ul,
      503ul, 541ul, 577ul, 619ul, 661ul, 709ul, 761ul, 823ul, 887ul,
      953ul, 1031ul, 1109ul, 1193ul, 1289ul, 1381ul, 1493ul, 1613ul,
      1741ul, 1879ul, 2029ul, 2179ul, 2357ul, 2549ul, 2753ul, 2971ul,
      3209ul, 3469ul, 3739ul, 4027ul, 4349ul, 4703ul, 5087ul, 5503ul,
      5953ul, 6427ul, 6949ul, 7517ul, 8123ul, 8783ul, 9497ul, 10273ul,
      11113ul, 12011ul, 12983ul, 14033ul, 15173ul, 16411ul, 17749ul,
      19183ul, 20753ul, 22447ul, 24281ul, 26267ul, 28411ul, 30727ul,
      33223ul, 35933ul, 38873ul, 42043ul, 45481ul, 49201ul, 53201ul,
      57557ul, 62233ul, 67307ul, 72817ul, 78779ul, 85229ul, 92203ul,
      99733ul, 107897ul, 116731ul, 126271ul, 136607ul, 147793ul,
      159871ul, 172933ul, 187091ul, 202409ul, 218971ul, 236897ul,
      256279ul, 277261ul, 299951ul, 324503ul, 351061ul, 379787ul,
      410857ul, 444487ul, 480881ul, 520241ul, 562841ul, 608903ul,
      658753ul, 712697ul, 771049ul, 834181ul, 902483ul, 976369ul,
      1056323ul, 1142821ul, 1236397ul, 1337629ul, 1447153ul, 1565659ul,
      1693859ul, 1832561ul, 1982627ul, 2144977ul, 2320627ul, 2510653ul,
      2716249ul, 2938679ul, 3179303ul, 3439651ul, 3721303ul, 4026031ul,
      4355707ul, 4712381ul, 5098259ul, 5515729ul, 5967347ul, 6456007ul,
      6984629ul, 7556579ul, 8175383ul, 8844859ul, 9569143ul, 10352717ul,
      11200489ul, 12117689ul, 13109983ul, 14183539ul, 15345007ul,
      16601593ul, 17961079ul, 19431899ul, 21023161ul, 22744717ul,
      24607243ul, 26622317ul, 28802401ul, 31160981ul, 33712729ul,
      36473443ul, 39460231ul, 42691603ul, 46187573ul, 49969847ul,
      54061849ul, 58488943ul, 63278561ul, 68460391ul, 74066549ul,
      80131819ul, 86693767ul, 93793069ul, 101473717ul, 109783337ul,
      118773397ul, 128499677ul, 139022417ul, 150406843ul, 162723577ul,
      176048909ul, 190465427ul, 206062531ul, 222936881ul, 241193053ul,
      260944219ul, 282312799ul, 305431229ul, 330442829ul, 357502601ul,
      386778277ul, 418451333ul, 452718089ul, 489790921ul, 529899637ul,
      573292817ul, 620239453ul, 671030513ul, 725980837ul, 785430967ul,
      849749479ul, 919334987ul, 994618837ul, 1076067617ul, 1164186217ul,
      1259520799ul, 1362662261ul, 1474249943ul, 1594975441ul,
      1725587117ul, 1866894511ul, 2019773507ul, 2185171673ul,
      2364114217ul, 2557710269ul, 2767159799ul, 2993761039ul,
      3238918481ul, 3504151727ul, 3791104843ul, 4101556399ul,
      4294967291ul,
      // Sentinel, so callers don't have to test the result of
      // lower_bound against the end of the usable entries.  On 32-bit
      // machines this repeats the largest 32-bit prime; on 64-bit
      // machines the remaining entries extend the table above 2^32.
      __ulongsize != 8 ? 4294967291ul : (unsigned long)6442450933ull,
      (unsigned long)8589934583ull,
      (unsigned long)12884901857ull, (unsigned long)17179869143ull,
      (unsigned long)25769803693ull, (unsigned long)34359738337ull,
      (unsigned long)51539607367ull, (unsigned long)68719476731ull,
      (unsigned long)103079215087ull, (unsigned long)137438953447ull,
      (unsigned long)206158430123ull, (unsigned long)274877906899ull,
      (unsigned long)412316860387ull, (unsigned long)549755813881ull,
      (unsigned long)824633720731ull, (unsigned long)1099511627689ull,
      (unsigned long)1649267441579ull, (unsigned long)2199023255531ull,
      (unsigned long)3298534883309ull, (unsigned long)4398046511093ull,
      (unsigned long)6597069766607ull, (unsigned long)8796093022151ull,
      (unsigned long)13194139533241ull, (unsigned long)17592186044399ull,
      (unsigned long)26388279066581ull, (unsigned long)35184372088777ull,
      (unsigned long)52776558133177ull, (unsigned long)70368744177643ull,
      (unsigned long)105553116266399ull, (unsigned long)140737488355213ull,
      (unsigned long)211106232532861ull, (unsigned long)281474976710597ull,
      (unsigned long)562949953421231ull, (unsigned long)1125899906842597ull,
      (unsigned long)2251799813685119ull, (unsigned long)4503599627370449ull,
      (unsigned long)9007199254740881ull, (unsigned long)18014398509481951ull,
      (unsigned long)36028797018963913ull, (unsigned long)72057594037927931ull,
      (unsigned long)144115188075855859ull,
      (unsigned long)288230376151711717ull,
      (unsigned long)576460752303423433ull,
      (unsigned long)1152921504606846883ull,
      (unsigned long)2305843009213693951ull,
      (unsigned long)4611686018427387847ull,
      (unsigned long)9223372036854775783ull,
      (unsigned long)18446744073709551557ull,
      // 64-bit sentinel: the largest entry is duplicated so that
      // lower_bound can never run off the end of the usable table.
      (unsigned long)18446744073709551557ull
    };
169 
  // Auxiliary types used for all instantiations of _Hashtable: nodes
  // and iterators.

  // Nodes, used to wrap elements stored in the hash table.  A policy
  // template parameter of class template _Hashtable controls whether
  // nodes also store a hash code. In some cases (e.g. strings) this
  // may be a performance win.
  template<typename _Value, bool __cache_hash_code>
    struct _Hash_node;

  // Specialization for nodes that cache the element's hash code.
  template<typename _Value>
    struct _Hash_node<_Value, true>
    {
      _Value       _M_v;          // the stored element
      std::size_t  _M_hash_code;  // cached hash code for _M_v
      _Hash_node*  _M_next;       // next node in the same bucket chain
    };

  // Specialization for nodes that do not cache the hash code.
  template<typename _Value>
    struct _Hash_node<_Value, false>
    {
      _Value       _M_v;     // the stored element
      _Hash_node*  _M_next;  // next node in the same bucket chain
    };
194 
  // Local iterators, used to iterate within a bucket but not between
  // buckets.  This base holds the current-node pointer and the
  // increment logic shared by the mutable and const node iterators.
  template<typename _Value, bool __cache>
    struct _Node_iterator_base
    {
      _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
      : _M_cur(__p) { }

      // Advance to the next node in the bucket's singly linked chain.
      void
      _M_incr()
      { _M_cur = _M_cur->_M_next; }

      // Current node; null for a default-constructed (end) iterator.
      _Hash_node<_Value, __cache>*  _M_cur;
    };
209 
  // Two local iterators are equal iff they point at the same node.
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Node_iterator_base<_Value, __cache>& __x,
	       const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur == __y._M_cur; }
215 
  // Inequality: the iterators point at different nodes.
  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Node_iterator_base<_Value, __cache>& __x,
	       const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur != __y._M_cur; }
221 
  // Mutable local (per-bucket) forward iterator.  When
  // __constant_iterators is true, pointer and reference are const
  // even for this "mutable" iterator (used when elements must not be
  // modified in place, e.g. set-like containers).
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value*, _Value*>::__type
                                                       pointer;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value&, _Value&>::__type
                                                       reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default constructor: null (end-of-bucket) iterator.
      _Node_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur->_M_v; }

      // Pre-increment: step to the next node in the bucket.
      _Node_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      // Post-increment: return the pre-increment position.
      _Node_iterator
      operator++(int)
      {
	_Node_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
268 
  // Const local (per-bucket) forward iterator; always exposes
  // const access to the element.  Implicitly convertible from the
  // corresponding mutable _Node_iterator.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      typedef const _Value*                            pointer;
      typedef const _Value&                            reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default constructor: null (end-of-bucket) iterator.
      _Node_const_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      // Converting constructor from the mutable iterator.
      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
			   __cache>& __x)
      : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur->_M_v; }

      // Pre-increment: step to the next node in the bucket.
      _Node_const_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      // Post-increment: return the pre-increment position.
      _Node_const_iterator
      operator++(int)
      {
	_Node_const_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
313 
  // Base for whole-table iterators: tracks both the current node and
  // the bucket that contains it, so iteration can continue into the
  // next non-empty bucket when a chain is exhausted.
  template<typename _Value, bool __cache>
    struct _Hashtable_iterator_base
    {
      _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
			       _Hash_node<_Value, __cache>** __bucket)
      : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

      // Advance within the current bucket; if its chain is exhausted,
      // move on to the next non-empty bucket.
      void
      _M_incr()
      {
	_M_cur_node = _M_cur_node->_M_next;
	if (!_M_cur_node)
	  _M_incr_bucket();
      }

      void
      _M_incr_bucket();

      _Hash_node<_Value, __cache>*   _M_cur_node;    // current node
      _Hash_node<_Value, __cache>**  _M_cur_bucket;  // bucket holding it
    };
335 
336   // Global iterators, used for arbitrary iteration within a hash
337   // table.  Larger and more expensive than local iterators.
338   template<typename _Value, bool __cache>
339     void
340     _Hashtable_iterator_base<_Value, __cache>::
341     _M_incr_bucket()
342     {
343       ++_M_cur_bucket;
344 
345       // This loop requires the bucket array to have a non-null sentinel.
346       while (!*_M_cur_bucket)
347 	++_M_cur_bucket;
348       _M_cur_node = *_M_cur_bucket;
349     }
350 
  // Two table iterators are equal iff they point at the same node;
  // the bucket pointer need not be compared.
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
	       const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node == __y._M_cur_node; }
356 
  // Inequality: the iterators point at different nodes.
  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
	       const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node != __y._M_cur_node; }
362 
  // Mutable whole-table forward iterator.  As with _Node_iterator,
  // __constant_iterators selects const pointer/reference types.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value*, _Value*>::__type
                                                       pointer;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value&, _Value&>::__type
                                                       reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default constructor: singular iterator.
      _Hashtable_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
			  _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      // Construct from a bucket: start at that bucket's first node.
      explicit
      _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur_node->_M_v; }

      // Pre-increment: advance, possibly into the next bucket.
      _Hashtable_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      // Post-increment: return the pre-increment position.
      _Hashtable_iterator
      operator++(int)
      {
	_Hashtable_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
413 
  // Const whole-table forward iterator; always exposes const access.
  // Implicitly convertible from the mutable _Hashtable_iterator.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_const_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      typedef const _Value*                            pointer;
      typedef const _Value&                            reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default constructor: singular iterator.
      _Hashtable_const_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
				_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      // Construct from a bucket: start at that bucket's first node.
      explicit
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      // Converting constructor from the mutable iterator.
      _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
				__constant_iterators, __cache>& __x)
      : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
						  __x._M_cur_bucket) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur_node->_M_v; }

      // Pre-increment: advance, possibly into the next bucket.
      _Hashtable_const_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      // Post-increment: return the pre-increment position.
      _Hashtable_const_iterator
      operator++(int)
      {
	_Hashtable_const_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
463 
464 
465   // Many of class template _Hashtable's template parameters are policy
466   // classes.  These are defaults for the policies.
467 
468   // Default range hashing function: use division to fold a large number
469   // into the range [0, N).
470   struct _Mod_range_hashing
471   {
472     typedef std::size_t first_argument_type;
473     typedef std::size_t second_argument_type;
474     typedef std::size_t result_type;
475 
476     result_type
477     operator()(first_argument_type __num, second_argument_type __den) const
478     { return __num % __den; }
479   };
480 
  // Default ranged hash function H.  In principle it should be a
  // function object composed from objects of type H1 and H2 such that
  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  // h1 and h2.  So instead we'll just use a tag to tell class template
  // hashtable to do that composition.  (Empty tag type; dispatched on
  // by the _Hash_code_base specializations in this file.)
  struct _Default_ranged_hash { };
487 
  // Default value for rehash policy.  Bucket size is (usually) the
  // smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    _Prime_rehash_policy(float __z = 1.0);

    float
    max_load_factor() const;

    // Return a bucket size no smaller than n.
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const;

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		   std::size_t __n_ins) const;

    float                _M_max_load_factor;  // maximum elements-per-bucket ratio
    float                _M_growth_factor;    // minimum growth ratio on rehash
    // Cached element count at which a rehash becomes necessary;
    // mutable because the const members above refresh the cache.
    mutable std::size_t  _M_next_resize;
  };
517 
  // Construct with maximum load factor __z (default 1.0).  The growth
  // factor is fixed at 2 and the cached resize threshold starts at 0,
  // forcing it to be computed on the first use.
  inline
  _Prime_rehash_policy::
  _Prime_rehash_policy(float __z)
  : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0)
  { }
523 
  // Observer for the configured maximum load factor.
  inline float
  _Prime_rehash_policy::
  max_load_factor() const
  { return _M_max_load_factor; }
528 
529   // Return a prime no smaller than n.
530   inline std::size_t
531   _Prime_rehash_policy::
532   _M_next_bkt(std::size_t __n) const
533   {
534     const unsigned long* const __last = (_Primes<>::__primes
535 					 + _Primes<>::__n_primes);
536     const unsigned long* __p = std::lower_bound(_Primes<>::__primes, __last,
537 						__n);
538     _M_next_resize = static_cast<std::size_t>(std::ceil(*__p
539 							* _M_max_load_factor));
540     return *__p;
541   }
542 
543   // Return the smallest prime p such that alpha p >= n, where alpha
544   // is the load factor.
545   inline std::size_t
546   _Prime_rehash_policy::
547   _M_bkt_for_elements(std::size_t __n) const
548   {
549     const unsigned long* const __last = (_Primes<>::__primes
550 					 + _Primes<>::__n_primes);
551     const float __min_bkts = __n / _M_max_load_factor;
552     const unsigned long* __p = std::lower_bound(_Primes<>::__primes, __last,
553 						__min_bkts, _LessThan());
554     _M_next_resize = static_cast<std::size_t>(std::ceil(*__p
555 							* _M_max_load_factor));
556     return *__p;
557   }
558 
  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
  // If p > __n_bkt, return make_pair(true, p); otherwise return
  // make_pair(false, 0).  In principle this isn't very different from
  // _M_bkt_for_elements.

  // The only tricky part is that we're caching the element count at
  // which we need to rehash, so we don't have to do a floating-point
  // multiply for every insertion.

  inline std::pair<bool, std::size_t>
  _Prime_rehash_policy::
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		 std::size_t __n_ins) const
  {
    // Fast path: below the cached threshold, nothing to do.
    if (__n_elt + __n_ins > _M_next_resize)
      {
	// Buckets needed to keep the load factor once the new
	// elements are inserted.
	float __min_bkts = ((float(__n_ins) + float(__n_elt))
			    / _M_max_load_factor);
	if (__min_bkts > __n_bkt)
	  {
	    // Grow by at least _M_growth_factor so repeated small
	    // insertions don't trigger a rehash every time.
	    __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
	    const unsigned long* const __last = (_Primes<>::__primes
						 + _Primes<>::__n_primes);
	    const unsigned long* __p = std::lower_bound(_Primes<>::__primes,
							__last, __min_bkts,
							_LessThan());
	    _M_next_resize =
	      static_cast<std::size_t>(std::ceil(*__p * _M_max_load_factor));
	    return std::make_pair(true, *__p);
	  }
	else
	  {
	    // Current bucket count still suffices; just refresh the
	    // cached threshold.
	    _M_next_resize =
	      static_cast<std::size_t>(std::ceil(__n_bkt
						 * _M_max_load_factor));
	    return std::make_pair(false, 0);
	  }
      }
    else
      return std::make_pair(false, 0);
  }
600 
  // Base classes for std::tr1::_Hashtable.  We define these base
  // classes because in some cases we want to do different things
  // depending on the value of a policy class.  In some cases the
  // policy class affects which member functions and nested typedefs
  // are defined; we handle that by specializing base class templates.
  // Several of the base class templates need to access other members
  // of class template _Hashtable, so we use the "curiously recurring
  // template pattern" for them.

  // class template _Map_base.  If the hashtable has a value type of the
  // form pair<T1, T2> and a key extraction policy that returns the
  // first part of the pair, the hashtable gets a mapped_type typedef.
  // If it satisfies those criteria and also has unique keys, then it
  // also gets an operator[].
  //
  // Primary template: contributes no map-specific interface.
  template<typename _Key, typename _Value, typename _Ex, bool __unique,
	   typename _Hashtable>
    struct _Map_base { };
618 
  // Pair value type with non-unique keys (multimap-like): provide
  // mapped_type but no operator[].
  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;
    };
624 
  // Pair value type with unique keys (map-like): provide mapped_type
  // and operator[] (defined out of line below).
  template<typename _Key, typename _Pair, typename _Hashtable>
  struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;

      mapped_type&
      operator[](const _Key& __k);
    };
633 
634   template<typename _Key, typename _Pair, typename _Hashtable>
635     typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
636 		       true, _Hashtable>::mapped_type&
637     _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
638     operator[](const _Key& __k)
639     {
640       _Hashtable* __h = static_cast<_Hashtable*>(this);
641       typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
642       std::size_t __n = __h->_M_bucket_index(__k, __code,
643 					     __h->_M_bucket_count);
644 
645       typename _Hashtable::_Node* __p =
646 	__h->_M_find_node(__h->_M_buckets[__n], __k, __code);
647       if (!__p)
648 	return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
649 				     __n, __code)->second;
650       return (__p->_M_v).second;
651     }
652 
  // class template _Rehash_base.  Give hashtable the max_load_factor
  // functions iff the rehash policy is _Prime_rehash_policy.
  //
  // Primary template: contributes nothing.
  template<typename _RehashPolicy, typename _Hashtable>
    struct _Rehash_base { };
657 
658   template<typename _Hashtable>
659     struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
660     {
661       float
662       max_load_factor() const
663       {
664 	const _Hashtable* __this = static_cast<const _Hashtable*>(this);
665 	return __this->__rehash_policy().max_load_factor();
666       }
667 
668       void
669       max_load_factor(float __z)
670       {
671 	_Hashtable* __this = static_cast<_Hashtable*>(this);
672 	__this->__rehash_policy(_Prime_rehash_policy(__z));
673       }
674     };
675 
  // Class template _Hash_code_base.  Encapsulates two policy issues that
  // aren't quite orthogonal.
  //   (1) the difference between using a ranged hash function and using
  //       the combination of a hash function and a range-hashing function.
  //       In the former case we don't have such things as hash codes, so
  //       we have a dummy type as placeholder.
  //   (2) Whether or not we cache hash codes.  Caching hash codes is
  //       meaningless if we have a ranged hash function.
  // We also put the key extraction and equality comparison function
  // objects here, for convenience.

  // Primary template: unused except as a hook for specializations.
  template<typename _Key, typename _Value,
	   typename _ExtractKey, typename _Equal,
	   typename _H1, typename _H2, typename _Hash,
	   bool __cache_hash_code>
    struct _Hash_code_base;
693 
  // Specialization: ranged hash function, no caching hash codes.  H1
  // and H2 are provided but ignored.  We define a dummy hash code type.
  template<typename _Key, typename _Value,
	   typename _ExtractKey, typename _Equal,
	   typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			   _Hash, false>
    {
    protected:
      // H1 and H2 are accepted for interface uniformity but unused.
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
		      const _H1&, const _H2&, const _Hash& __h)
      : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }

      // With a ranged hash there is no intermediate hash code; use a
      // dummy pointer type (always passed around as 0).
      typedef void* _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __key) const
      { return 0; }

      // Bucket index comes straight from the ranged hash function.
      std::size_t
      _M_bucket_index(const _Key& __k, _Hash_code_type,
		      std::size_t __n) const
      { return _M_ranged_hash(__k, __n); }

      // Bucket index for an existing node: re-hash its extracted key.
      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
		      std::size_t __n) const
      { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }

      // No hash code to pre-compare; go straight to key equality.
      bool
      _M_compare(const _Key& __k, _Hash_code_type,
		 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      // Nodes store no hash code: storing and copying are no-ops.
      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
		   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
	std::swap(_M_extract, __x._M_extract);
	std::swap(_M_eq, __x._M_eq);
	std::swap(_M_ranged_hash, __x._M_ranged_hash);
      }

    protected:
      _ExtractKey  _M_extract;      // key-from-value extractor
      _Equal       _M_eq;           // key equality predicate
      _Hash        _M_ranged_hash;  // maps (key, N) directly to [0, N)
    };
750 
  // No specialization for ranged hash function while caching hash codes.
  // That combination is meaningless, and trying to do it is an error.


  // Specialization: ranged hash function, cache hash codes.  This
  // combination is meaningless, so we provide only a declaration
  // and no definition: any attempt to instantiate it is a
  // compile-time error.
  template<typename _Key, typename _Value,
	   typename _ExtractKey, typename _Equal,
	   typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			   _Hash, true>;
764 
  // Specialization: hash function and range-hashing function, no
  // caching of hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
	   typename _ExtractKey, typename _Equal,
	   typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			   _Default_ranged_hash, false>
    {
      // TR1-required typedef and accessor for the user's hash function.
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
		      const _H1& __h1, const _H2& __h2,
		      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      // The hash code is the raw result of H1.
      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      // Fold an already-computed hash code into [0, __n) with H2.
      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
		      std::size_t __n) const
      { return _M_h2(__c, __n); }

      // Bucket index for an existing node: codes aren't cached, so
      // the key must be re-hashed with H1.
      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
		      std::size_t __n) const
      { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }

      // No cached code to pre-compare; go straight to key equality.
      bool
      _M_compare(const _Key& __k, _Hash_code_type,
		 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      // Nodes store no hash code: storing and copying are no-ops.
      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
		   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
	std::swap(_M_extract, __x._M_extract);
	std::swap(_M_eq, __x._M_eq);
	std::swap(_M_h1, __x._M_h1);
	std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;  // key-from-value extractor
      _Equal       _M_eq;       // key equality predicate
      _H1          _M_h1;       // hash function: key -> hash code
      _H2          _M_h2;       // range-hashing function: (code, N) -> [0, N)
    };
831 
  // Specialization: hash function and range-hashing function,
  // caching hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
	   typename _ExtractKey, typename _Equal,
	   typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			   _Default_ranged_hash, true>
    {
      // TR1-required typedef and accessor for the user's hash function.
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
		      const _H1& __h1, const _H2& __h2,
		      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      // The hash code is the raw result of H1.
      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      // Fold an already-computed hash code into [0, __n) with H2.
      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
		      std::size_t __n) const
      { return _M_h2(__c, __n); }

      // Bucket index for an existing node: reuse its cached code, so
      // the key is never re-hashed (the point of caching).
      std::size_t
      _M_bucket_index(const _Hash_node<_Value, true>* __p,
		      std::size_t __n) const
      { return _M_h2(__p->_M_hash_code, __n); }

      // Compare cached codes first to skip the (possibly expensive)
      // key equality test on most mismatches.
      bool
      _M_compare(const _Key& __k, _Hash_code_type __c,
		 _Hash_node<_Value, true>* __n) const
      { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

      // Record the code computed for a newly inserted node.
      void
      _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
      { __n->_M_hash_code = __c; }

      // Propagate the cached code when copying a node.
      void
      _M_copy_code(_Hash_node<_Value, true>* __to,
		   const _Hash_node<_Value, true>* __from) const
      { __to->_M_hash_code = __from->_M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      {
	std::swap(_M_extract, __x._M_extract);
	std::swap(_M_eq, __x._M_eq);
	std::swap(_M_h1, __x._M_h1);
	std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;  // key-from-value extractor
      _Equal       _M_eq;       // key equality predicate
      _H1          _M_h1;       // hash function: key -> hash code
      _H2          _M_h2;       // range-hashing function: (code, N) -> [0, N)
    };
898 } // namespace __detail
899 _GLIBCXX_END_NAMESPACE
900 } // namespace std::tr1
901 
902 #endif // _TR1_HASHTABLE_POLICY_H
903 
904