// [page-extraction artifacts removed: file-size banner and a run of
//  concatenated listing line numbers that are not part of the source]
  1. // Internal policy header for unordered_set and unordered_map -*- C++ -*-
  2. // Copyright (C) 2010-2021 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file bits/hashtable_policy.h
  21. * This is an internal header file, included by other library headers.
  22. * Do not attempt to use it directly.
  23. * @headername{unordered_map,unordered_set}
  24. */
  25. #ifndef _HASHTABLE_POLICY_H
  26. #define _HASHTABLE_POLICY_H 1
  27. #include <tuple> // for std::tuple, std::forward_as_tuple
  28. #include <bits/stl_algobase.h> // for std::min, std::is_permutation.
  29. #include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
  30. namespace std _GLIBCXX_VISIBILITY(default)
  31. {
  32. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  33. template<typename _Key, typename _Value, typename _Alloc,
  34. typename _ExtractKey, typename _Equal,
  35. typename _Hash, typename _RangeHash, typename _Unused,
  36. typename _RehashPolicy, typename _Traits>
  37. class _Hashtable;
  38. namespace __detail
  39. {
  40. /**
  41. * @defgroup hashtable-detail Base and Implementation Classes
  42. * @ingroup unordered_associative_containers
  43. * @{
  44. */
  45. template<typename _Key, typename _Value, typename _ExtractKey,
  46. typename _Equal, typename _Hash, typename _RangeHash,
  47. typename _Unused, typename _Traits>
  48. struct _Hashtable_base;
  // Helper function: for forward (multi-pass) iterators this is
  // distance(first, last); for single-pass input iterators it only
  // reports emptiness, returning 0 for an empty range and 1 otherwise,
  // because counting would consume the range.
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
		  std::input_iterator_tag)
    {
      if (__first == __last)
	return 0;
      return 1;
    }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
		  std::forward_iterator_tag)
    {
      // Forward iterators can be traversed without invalidating them.
      return std::distance(__first, __last);
    }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last)
    {
      // Tag-dispatch on the iterator category.
      typedef typename std::iterator_traits<_Iterator>::iterator_category
	__category;
      return __distance_fw(__first, __last, __category());
    }
  // Key-extraction functor for sets: the element itself is the key, so
  // return the argument unchanged, preserving its value category.
  struct _Identity
  {
    template<typename _Tp>
      _Tp&&
      operator()(_Tp&& __x) const noexcept
      { return std::forward<_Tp>(__x); }
  };
  // Key-extraction functor for maps: the key is the first element of the
  // stored pair.  Uses std::get<0> so it works with any tuple-like value,
  // and the trailing return type preserves reference/const qualification.
  struct _Select1st
  {
    template<typename _Tp>
      auto
      operator()(_Tp&& __x) const noexcept
      -> decltype(std::get<0>(std::forward<_Tp>(__x)))
      { return std::get<0>(std::forward<_Tp>(__x)); }
  };
  81. template<typename _NodeAlloc>
  82. struct _Hashtable_alloc;
  // Functor recycling a pool of nodes and using allocation once the pool is
  // empty.
  template<typename _NodeAlloc>
    struct _ReuseOrAllocNode
    {
    private:
      using __node_alloc_type = _NodeAlloc;
      using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
      using __node_alloc_traits =
	typename __hashtable_alloc::__node_alloc_traits;
      using __node_type = typename __hashtable_alloc::__node_type;

    public:
      // Takes ownership of the singly-linked list __nodes; any node still
      // in the pool when *this is destroyed is deallocated.
      _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
      : _M_nodes(__nodes), _M_h(__h) { }
      _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;

      // Release all unused pooled nodes.
      ~_ReuseOrAllocNode()
      { _M_h._M_deallocate_nodes(_M_nodes); }

      // Return a node whose value is constructed from __arg: reuse the
      // head of the pool if the pool is non-empty, otherwise allocate.
      template<typename _Arg>
	__node_type*
	operator()(_Arg&& __arg) const
	{
	  if (_M_nodes)
	    {
	      // Detach the first node from the pool.
	      __node_type* __node = _M_nodes;
	      _M_nodes = _M_nodes->_M_next();
	      __node->_M_nxt = nullptr;
	      auto& __a = _M_h._M_node_allocator();
	      // Destroy the recycled node's old value before constructing
	      // the new one in the same storage.
	      __node_alloc_traits::destroy(__a, __node->_M_valptr());
	      __try
		{
		  __node_alloc_traits::construct(__a, __node->_M_valptr(),
						 std::forward<_Arg>(__arg));
		}
	      __catch(...)
		{
		  // Construction failed, so the node holds no value;
		  // release its memory instead of returning it to the pool.
		  _M_h._M_deallocate_node_ptr(__node);
		  __throw_exception_again;
		}
	      return __node;
	    }
	  return _M_h._M_allocate_node(std::forward<_Arg>(__arg));
	}

    private:
      // Head of the pool of reusable nodes; mutable because operator()
      // is const but consumes nodes.
      mutable __node_type* _M_nodes;
      // Allocator wrapper used for (de)allocation and value construction.
      __hashtable_alloc& _M_h;
    };
  // Functor similar to the previous one but without any pool of nodes to
  // recycle.
  template<typename _NodeAlloc>
    struct _AllocNode
    {
    private:
      using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
      using __node_type = typename __hashtable_alloc::__node_type;

    public:
      _AllocNode(__hashtable_alloc& __h)
      : _M_h(__h) { }

      // Allocate a fresh node holding a value constructed from __arg.
      template<typename _Arg>
	__node_type*
	operator()(_Arg&& __arg) const
	{ return _M_h._M_allocate_node(std::forward<_Arg>(__arg)); }

    private:
      // Allocator wrapper used for node allocation.
      __hashtable_alloc& _M_h;
    };
  147. // Auxiliary types used for all instantiations of _Hashtable nodes
  148. // and iterators.
  149. /**
  150. * struct _Hashtable_traits
  151. *
  152. * Important traits for hash tables.
  153. *
  154. * @tparam _Cache_hash_code Boolean value. True if the value of
  155. * the hash function is stored along with the value. This is a
  156. * time-space tradeoff. Storing it may improve lookup speed by
  157. * reducing the number of times we need to call the _Hash or _Equal
  158. * functors.
  159. *
  160. * @tparam _Constant_iterators Boolean value. True if iterator and
  161. * const_iterator are both constant iterator types. This is true
  162. * for unordered_set and unordered_multiset, false for
  163. * unordered_map and unordered_multimap.
  164. *
  165. * @tparam _Unique_keys Boolean value. True if the return value
  166. * of _Hashtable::count(k) is always at most one, false if it may
  167. * be an arbitrary number. This is true for unordered_set and
  168. * unordered_map, false for unordered_multiset and
  169. * unordered_multimap.
  170. */
  template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
    struct _Hashtable_traits
    {
      // Each boolean policy is exposed as an integral-constant type so
      // other hashtable components can tag-dispatch on it.
      using __hash_cached
	= std::integral_constant<bool, _Cache_hash_code>;
      using __constant_iterators
	= std::integral_constant<bool, _Constant_iterators>;
      using __unique_keys
	= std::integral_constant<bool, _Unique_keys>;
    };
  /**
   *  struct _Hash_node_base
   *
   *  Nodes, used to wrap elements stored in the hash table.  A policy
   *  template parameter of class template _Hashtable controls whether
   *  nodes also store a hash code. In some cases (e.g. strings) this
   *  may be a performance win.
   */
  struct _Hash_node_base
  {
    // Next node in the singly-linked list of nodes.
    _Hash_node_base* _M_nxt;

    _Hash_node_base() noexcept : _M_nxt() { }

    _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
  };
  /**
   *  struct _Hash_node_value_base
   *
   *  Node type with the value to store.
   */
  template<typename _Value>
    struct _Hash_node_value_base
    {
      typedef _Value value_type;

      // Suitably-aligned raw storage for the value; the value is
      // constructed and destroyed explicitly through the node allocator.
      __gnu_cxx::__aligned_buffer<_Value> _M_storage;

      // Pointer to the (possibly not-yet-constructed) stored value.
      _Value*
      _M_valptr() noexcept
      { return _M_storage._M_ptr(); }

      const _Value*
      _M_valptr() const noexcept
      { return _M_storage._M_ptr(); }

      // Reference to the stored value (must be constructed).
      _Value&
      _M_v() noexcept
      { return *_M_valptr(); }

      const _Value&
      _M_v() const noexcept
      { return *_M_valptr(); }
    };
  /**
   *  Primary template struct _Hash_node_code_cache.
   *  Empty when the hash code is not cached in the node.
   */
  template<bool _Cache_hash_code>
    struct _Hash_node_code_cache
    { };

  /**
   *  Specialization for node with cache, struct _Hash_node_code_cache.
   *  Stores the hash code of the element's key alongside the value.
   */
  template<>
    struct _Hash_node_code_cache<true>
    { std::size_t _M_hash_code; };

  // Combines value storage with the (possibly empty) hash-code cache.
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node_value
    : _Hash_node_value_base<_Value>
    , _Hash_node_code_cache<_Cache_hash_code>
    { };
  /**
   *  Primary template struct _Hash_node.
   *  The complete node type: forward-list link, value storage, and the
   *  optional cached hash code.
   */
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node
    : _Hash_node_base
    , _Hash_node_value<_Value, _Cache_hash_code>
    {
      // Next node, downcast from the base-class link: every node in a
      // hashtable's list is a _Hash_node of this instantiation.
      _Hash_node*
      _M_next() const noexcept
      { return static_cast<_Hash_node*>(this->_M_nxt); }
    };
  /// Base class for node iterators.
  template<typename _Value, bool _Cache_hash_code>
    struct _Node_iterator_base
    {
      using __node_type = _Hash_node<_Value, _Cache_hash_code>;

      // Current node, or null for a past-the-end iterator.
      __node_type* _M_cur;

      _Node_iterator_base() : _M_cur(nullptr) { }

      _Node_iterator_base(__node_type* __p) noexcept
      : _M_cur(__p) { }

      // Advance to the next node in the container-wide node list.
      void
      _M_incr() noexcept
      { _M_cur = _M_cur->_M_next(); }

      friend bool
      operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
      noexcept
      { return __x._M_cur == __y._M_cur; }

#if __cpp_impl_three_way_comparison < 201907L
      // In C++20 operator!= is synthesized from operator==.
      friend bool
      operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
      noexcept
      { return __x._M_cur != __y._M_cur; }
#endif
    };
  /// Node iterators, used to iterate through all the hashtable.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
    private:
      using __base_type = _Node_iterator_base<_Value, __cache>;
      using __node_type = typename __base_type::__node_type;

    public:
      typedef _Value				value_type;
      typedef std::ptrdiff_t			difference_type;
      typedef std::forward_iterator_tag		iterator_category;

      // For sets even the mutable iterator must not allow modifying the
      // element, because the element is the key.
      using pointer = typename std::conditional<__constant_iterators,
						const value_type*, value_type*>::type;

      using reference = typename std::conditional<__constant_iterators,
						  const value_type&, value_type&>::type;

      _Node_iterator() = default;

      explicit
      _Node_iterator(__node_type* __p) noexcept
      : __base_type(__p) { }

      reference
      operator*() const noexcept
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const noexcept
      { return this->_M_cur->_M_valptr(); }

      _Node_iterator&
      operator++() noexcept
      {
	this->_M_incr();
	return *this;
      }

      _Node_iterator
      operator++(int) noexcept
      {
	_Node_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
  /// Node const_iterators, used to iterate through all the hashtable.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
    private:
      using __base_type = _Node_iterator_base<_Value, __cache>;
      using __node_type = typename __base_type::__node_type;

    public:
      typedef _Value				value_type;
      typedef std::ptrdiff_t			difference_type;
      typedef std::forward_iterator_tag		iterator_category;

      typedef const value_type*			pointer;
      typedef const value_type&			reference;

      _Node_const_iterator() = default;

      explicit
      _Node_const_iterator(__node_type* __p) noexcept
      : __base_type(__p) { }

      // Implicit conversion from the corresponding mutable iterator.
      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
			   __cache>& __x) noexcept
      : __base_type(__x._M_cur) { }

      reference
      operator*() const noexcept
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const noexcept
      { return this->_M_cur->_M_valptr(); }

      _Node_const_iterator&
      operator++() noexcept
      {
	this->_M_incr();
	return *this;
      }

      _Node_const_iterator
      operator++(int) noexcept
      {
	_Node_const_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
  348. // Many of class template _Hashtable's template parameters are policy
  349. // classes. These are defaults for the policies.
  /// Default range hashing function: use division to fold a large number
  /// into the range [0, N).
  struct _Mod_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    // Reduce __num into [0, __den) by taking the remainder.
    result_type
    operator()(first_argument_type __num,
	       second_argument_type __den) const noexcept
    {
      const result_type __folded = __num % __den;
      return __folded;
    }
  };
  /// Default ranged hash function H.  In principle it should be a
  /// function object composed from objects of type H1 and H2 such that
  /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  /// h1 and h2.  So instead we'll just use a tag to tell class template
  /// hashtable to do that composition.
  struct _Default_ranged_hash { };	// Tag type only; carries no state.
  /// Default value for rehash policy.  Bucket size is (usually) the
  /// smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    using __has_load_factor = true_type;

    _Prime_rehash_policy(float __z = 1.0) noexcept
    : _M_max_load_factor(__z), _M_next_resize(0) { }

    float
    max_load_factor() const noexcept
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n.
    // (Defined out of line, not visible in this header.)
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const
    { return __builtin_ceil(__n / (double)_M_max_load_factor); }

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    // (Defined out of line, not visible in this header.)
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		   std::size_t __n_ins) const;

    // Opaque state used to save and restore the policy across
    // operations: the cached next-resize threshold.
    typedef std::size_t _State;

    _State
    _M_state() const
    { return _M_next_resize; }

    void
    _M_reset() noexcept
    { _M_next_resize = 0; }

    void
    _M_reset(_State __state)
    { _M_next_resize = __state; }

    static const std::size_t _S_growth_factor = 2;

    float _M_max_load_factor;
    // Cached threshold; mutable so const members can update the cache.
    mutable std::size_t _M_next_resize;
  };
  /// Range hashing function assuming that second arg is a power of 2.
  struct _Mask_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    // With __den == 2^k, masking by __den - 1 keeps the k low-order
    // bits, which equals __num % __den without a division.
    result_type
    operator()(first_argument_type __num,
	       second_argument_type __den) const noexcept
    {
      const result_type __mask = __den - 1;
      return __num & __mask;
    }
  };
  /// Compute closest power of 2 not less than __n
  /// (returns 0 on overflow, i.e. when __n exceeds the largest power
  /// of 2 representable in size_t; callers handle that case).
  inline std::size_t
  __clp2(std::size_t __n) noexcept
  {
    // Equivalent to return __n ? std::bit_ceil(__n) : 0;
    if (__n <= 1)
      return __n;	// 0 and 1 are their own ceilings.

    // Leading-zero count of __n - 1 locates the highest set bit.
    const unsigned __lz = sizeof(size_t) > sizeof(long)
      ? __builtin_clzll(__n - 1ull)
      : __builtin_clzl(__n - 1ul);

    // size_t has no padding bits, so its value-bit count is
    // sizeof(size_t) * CHAR_BIT (same as __int_traits<size_t>::__digits).
    const unsigned __digits = sizeof(size_t) * __CHAR_BIT__;

    // Doing two shifts avoids undefined behaviour when __lz == 0.
    return (size_t(1) << (__digits - __lz - 1)) << 1;
  }
  /// Rehash policy providing power of 2 bucket numbers.  Avoids modulo
  /// operations.
  struct _Power2_rehash_policy
  {
    using __has_load_factor = true_type;

    _Power2_rehash_policy(float __z = 1.0) noexcept
    : _M_max_load_factor(__z), _M_next_resize(0) { }

    float
    max_load_factor() const noexcept
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n (as long as n is not above the
    // highest power of 2).
    std::size_t
    _M_next_bkt(std::size_t __n) noexcept
    {
      if (__n == 0)
	// Special case on container 1st initialization with 0 bucket count
	// hint.  We keep _M_next_resize to 0 to make sure that next time we
	// want to add an element allocation will take place.
	return 1;

      const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
      const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
      std::size_t __res = __clp2(__n);

      if (__res == 0)
	// __clp2 overflowed (n above the highest power of 2): clamp.
	__res = __max_bkt;
      else if (__res == 1)
	// If __res is 1 we force it to 2 to make sure there will be an
	// allocation so that nothing need to be stored in the initial
	// single bucket
	__res = 2;

      if (__res == __max_bkt)
	// Set next resize to the max value so that we never try to rehash again
	// as we already reach the biggest possible bucket number.
	// Note that it might result in max_load_factor not being respected.
	_M_next_resize = size_t(-1);
      else
	_M_next_resize
	  = __builtin_floor(__res * (double)_M_max_load_factor);

      return __res;
    }

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const noexcept
    { return __builtin_ceil(__n / (double)_M_max_load_factor); }

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		   std::size_t __n_ins) noexcept
    {
      if (__n_elt + __n_ins > _M_next_resize)
	{
	  // If _M_next_resize is 0 it means that we have nothing allocated so
	  // far and that we start inserting elements.  In this case we start
	  // with an initial bucket size of 11.
	  double __min_bkts
	    = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
	    / (double)_M_max_load_factor;
	  if (__min_bkts >= __n_bkt)
	    return { true,
	      _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
						__n_bkt * _S_growth_factor)) };

	  // Current bucket count is still large enough: refresh the
	  // cached threshold and do not rehash.
	  _M_next_resize
	    = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
	  return { false, 0 };
	}
      else
	return { false, 0 };
    }

    // Opaque state used to save and restore the policy: the cached
    // next-resize threshold.
    typedef std::size_t _State;

    _State
    _M_state() const noexcept
    { return _M_next_resize; }

    void
    _M_reset() noexcept
    { _M_next_resize = 0; }

    void
    _M_reset(_State __state) noexcept
    { _M_next_resize = __state; }

    static const std::size_t _S_growth_factor = 2;

    float _M_max_load_factor;
    // Element count at which the next rehash should be considered.
    std::size_t _M_next_resize;
  };
  516. // Base classes for std::_Hashtable. We define these base classes
  517. // because in some cases we want to do different things depending on
  518. // the value of a policy class. In some cases the policy class
  519. // affects which member functions and nested typedefs are defined;
  520. // we handle that by specializing base class templates. Several of
  521. // the base class templates need to access other members of class
  522. // template _Hashtable, so we use a variant of the "Curiously
  523. // Recurring Template Pattern" (CRTP) technique.
  /**
   *  Primary class template _Map_base.
   *
   *  If the hashtable has a value type of the form pair<T1, T2> and a
   *  key extraction policy (_ExtractKey) that returns the first part
   *  of the pair, the hashtable gets a mapped_type typedef.  If it
   *  satisfies those criteria and also has unique keys, then it also
   *  gets an operator[].
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Map_base { };	// Empty unless _ExtractKey is _Select1st (see below).
  /// Partial specialization, __unique_keys set to false.
  /// Provides only mapped_type: multimap-like tables have no operator[].
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using mapped_type = typename std::tuple_element<1, _Pair>::type;
    };
  /// Partial specialization, __unique_keys set to true.
  /// Adds operator[] and at(); the derived class is the full _Hashtable
  /// (CRTP), reached by downcasting this.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
    private:
      using __hashtable_base = _Hashtable_base<_Key, _Pair, _Select1st, _Equal,
					       _Hash, _RangeHash, _Unused,
					       _Traits>;

      using __hashtable = _Hashtable<_Key, _Pair, _Alloc, _Select1st, _Equal,
				     _Hash, _RangeHash,
				     _Unused, _RehashPolicy, _Traits>;

      using __hash_code = typename __hashtable_base::__hash_code;

    public:
      using key_type = typename __hashtable_base::key_type;
      using mapped_type = typename std::tuple_element<1, _Pair>::type;

      // Insert a value-initialized element if __k is not present.
      mapped_type&
      operator[](const key_type& __k);

      mapped_type&
      operator[](key_type&& __k);

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // DR 761. unordered_map needs an at() member function.
      mapped_type&
      at(const key_type& __k);

      const mapped_type&
      at(const key_type& __k) const;
    };
  // operator[] for lvalue keys: find the element, or insert one with a
  // copied key and a value-initialized mapped value.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    auto
    _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    operator[](const key_type& __k)
    -> mapped_type&
    {
      // Downcast to the complete hashtable type (CRTP).
      __hashtable* __h = static_cast<__hashtable*>(this);
      __hash_code __code = __h->_M_hash_code(__k);
      std::size_t __bkt = __h->_M_bucket_index(__code);
      if (auto __node = __h->_M_find_node(__bkt, __k, __code))
	return __node->_M_v().second;

      // Not found: build a node via piecewise construction so the mapped
      // value is value-initialized.  _Scoped_node owns the node and frees
      // it if insertion throws.
      typename __hashtable::_Scoped_node __node {
	__h,
	std::piecewise_construct,
	std::tuple<const key_type&>(__k),
	std::tuple<>()
      };
      auto __pos
	= __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
      // Insertion succeeded: the table now owns the node.
      __node._M_node = nullptr;
      return __pos->second;
    }
  // operator[] for rvalue keys: same as the lvalue overload but the key
  // is moved into the new node instead of copied.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    auto
    _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    operator[](key_type&& __k)
    -> mapped_type&
    {
      // Downcast to the complete hashtable type (CRTP).
      __hashtable* __h = static_cast<__hashtable*>(this);
      __hash_code __code = __h->_M_hash_code(__k);
      std::size_t __bkt = __h->_M_bucket_index(__code);
      if (auto __node = __h->_M_find_node(__bkt, __k, __code))
	return __node->_M_v().second;

      // Not found: move the key into a new node; the mapped value is
      // value-initialized.  _Scoped_node frees the node on failure.
      typename __hashtable::_Scoped_node __node {
	__h,
	std::piecewise_construct,
	std::forward_as_tuple(std::move(__k)),
	std::tuple<>()
      };
      auto __pos
	= __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
      // Insertion succeeded: the table now owns the node.
      __node._M_node = nullptr;
      return __pos->second;
    }
  // at(): checked lookup; throws std::out_of_range if the key is absent
  // (DR 761).
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    auto
    _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    at(const key_type& __k)
    -> mapped_type&
    {
      __hashtable* __h = static_cast<__hashtable*>(this);
      auto __ite = __h->find(__k);
      // A null node pointer means the key was not found.
      if (!__ite._M_cur)
	__throw_out_of_range(__N("_Map_base::at"));
      return __ite->second;
    }
  642. template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
  643. typename _Hash, typename _RangeHash, typename _Unused,
  644. typename _RehashPolicy, typename _Traits>
  645. auto
  646. _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
  647. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
  648. at(const key_type& __k) const
  649. -> const mapped_type&
  650. {
  651. const __hashtable* __h = static_cast<const __hashtable*>(this);
  652. auto __ite = __h->find(__k);
  653. if (!__ite._M_cur)
  654. __throw_out_of_range(__N("_Map_base::at"));
  655. return __ite->second;
  656. }
  657. /**
  658. * Primary class template _Insert_base.
  659. *
  660. * Defines @c insert member functions appropriate to all _Hashtables.
  661. */
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits>
  struct _Insert_base
  {
  protected:
    using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
                                             _Equal, _Hash, _RangeHash,
                                             _Unused, _Traits>;

    using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                                   _Hash, _RangeHash,
                                   _Unused, _RehashPolicy, _Traits>;

    using __hash_cached = typename _Traits::__hash_cached;
    using __constant_iterators = typename _Traits::__constant_iterators;

    using __hashtable_alloc = _Hashtable_alloc<
      __alloc_rebind<_Alloc, _Hash_node<_Value,
                                        __hash_cached::value>>>;

    using value_type = typename __hashtable_base::value_type;
    using size_type = typename __hashtable_base::size_type;

    using __unique_keys = typename _Traits::__unique_keys;
    using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
    using __node_gen_type = _AllocNode<__node_alloc_type>;

    // Downcast to the most-derived _Hashtable; _Insert_base is always a
    // base of that type, so the cast is safe.
    __hashtable&
    _M_conjure_hashtable()
    { return *(static_cast<__hashtable*>(this)); }

    // Range insertion, tag-dispatched on unique (true_type) versus
    // equivalent (false_type) keys; defined out of line below.
    template<typename _InputIterator, typename _NodeGetter>
      void
      _M_insert_range(_InputIterator __first, _InputIterator __last,
                      const _NodeGetter&, true_type __uks);

    template<typename _InputIterator, typename _NodeGetter>
      void
      _M_insert_range(_InputIterator __first, _InputIterator __last,
                      const _NodeGetter&, false_type __uks);

  public:
    using iterator = _Node_iterator<_Value, __constant_iterators::value,
                                    __hash_cached::value>;

    using const_iterator = _Node_const_iterator<_Value,
                                                __constant_iterators::value,
                                                __hash_cached::value>;

    // insert() returns pair<iterator, bool> for unique keys and a plain
    // iterator for equivalent keys.
    using __ireturn_type = typename std::conditional<__unique_keys::value,
                                                     std::pair<iterator, bool>,
                                                     iterator>::type;

    __ireturn_type
    insert(const value_type& __v)
    {
      __hashtable& __h = _M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(__v, __node_gen, __unique_keys{});
    }

    iterator
    insert(const_iterator __hint, const value_type& __v)
    {
      __hashtable& __h = _M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
    }

    // try_emplace for unique-keys containers.  The hint iterator is
    // accepted for interface compatibility but not used.
    template<typename _KType, typename... _Args>
      std::pair<iterator, bool>
      try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
      {
        __hashtable& __h = _M_conjure_hashtable();
        auto __code = __h._M_hash_code(__k);
        std::size_t __bkt = __h._M_bucket_index(__code);
        // Existing key: nothing is constructed, report failure.
        if (auto __node = __h._M_find_node(__bkt, __k, __code))
          return { iterator(__node), false };

        // Construct the node first (owned by _Scoped_node for exception
        // safety), then link it into the table.
        typename __hashtable::_Scoped_node __node {
          &__h,
          std::piecewise_construct,
          std::forward_as_tuple(std::forward<_KType>(__k)),
          std::forward_as_tuple(std::forward<_Args>(__args)...)
        };
        auto __it
          = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
        // Ownership transferred to the table; disarm the guard.
        __node._M_node = nullptr;
        return { __it, true };
      }

    void
    insert(initializer_list<value_type> __l)
    { this->insert(__l.begin(), __l.end()); }

    template<typename _InputIterator>
      void
      insert(_InputIterator __first, _InputIterator __last)
      {
        __hashtable& __h = _M_conjure_hashtable();
        __node_gen_type __node_gen(__h);
        return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
      }
  };
  750. template<typename _Key, typename _Value, typename _Alloc,
  751. typename _ExtractKey, typename _Equal,
  752. typename _Hash, typename _RangeHash, typename _Unused,
  753. typename _RehashPolicy, typename _Traits>
  754. template<typename _InputIterator, typename _NodeGetter>
  755. void
  756. _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  757. _Hash, _RangeHash, _Unused,
  758. _RehashPolicy, _Traits>::
  759. _M_insert_range(_InputIterator __first, _InputIterator __last,
  760. const _NodeGetter& __node_gen, true_type __uks)
  761. {
  762. __hashtable& __h = _M_conjure_hashtable();
  763. for (; __first != __last; ++__first)
  764. __h._M_insert(*__first, __node_gen, __uks);
  765. }
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits>
  template<typename _InputIterator, typename _NodeGetter>
    void
    _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                 _Hash, _RangeHash, _Unused,
                 _RehashPolicy, _Traits>::
    _M_insert_range(_InputIterator __first, _InputIterator __last,
                    const _NodeGetter& __node_gen, false_type __uks)
    {
      using __rehash_type = typename __hashtable::__rehash_type;
      using __rehash_state = typename __hashtable::__rehash_state;
      using pair_type = std::pair<bool, std::size_t>;

      // Equivalent keys: every element will be inserted, so the number of
      // new nodes is known up front (forward-iterator distance) and a
      // single rehash can be done before the loop.
      size_type __n_elt = __detail::__distance_fw(__first, __last);
      if (__n_elt == 0)
        return;

      __hashtable& __h = _M_conjure_hashtable();
      __rehash_type& __rehash = __h._M_rehash_policy;
      const __rehash_state& __saved_state = __rehash._M_state();
      // first: whether a rehash is needed; second: the new bucket count.
      pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
                                                      __h._M_element_count,
                                                      __n_elt);

      if (__do_rehash.first)
        __h._M_rehash(__do_rehash.second, __saved_state);

      for (; __first != __last; ++__first)
        __h._M_insert(*__first, __node_gen, __uks);
    }
  795. /**
  796. * Primary class template _Insert.
  797. *
  798. * Defines @c insert member functions that depend on _Hashtable policies,
  799. * via partial specializations.
  800. */
// The defaulted bool parameter selects a specialization depending on
// whether the container exposes constant iterators (per _Traits).
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits,
         bool _Constant_iterators = _Traits::__constant_iterators::value>
  struct _Insert;
/// Specialization for containers with constant iterators: adds
/// rvalue-reference insert overloads that move from the argument.
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits>
  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                 _Hash, _RangeHash, _Unused,
                 _RehashPolicy, _Traits, true>
  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                        _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
  {
    using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
                                     _Equal, _Hash, _RangeHash, _Unused,
                                     _RehashPolicy, _Traits>;

    using value_type = typename __base_type::value_type;
    using iterator = typename __base_type::iterator;
    using const_iterator = typename __base_type::const_iterator;
    using __ireturn_type = typename __base_type::__ireturn_type;

    using __unique_keys = typename __base_type::__unique_keys;
    using __hashtable = typename __base_type::__hashtable;
    using __node_gen_type = typename __base_type::__node_gen_type;

    // Bring in the const-lvalue and range overloads from the base.
    using __base_type::insert;

    __ireturn_type
    insert(value_type&& __v)
    {
      __hashtable& __h = this->_M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
    }

    iterator
    insert(const_iterator __hint, value_type&& __v)
    {
      __hashtable& __h = this->_M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(__hint, std::move(__v), __node_gen,
                           __unique_keys{});
    }
  };
/// Specialization for containers with mutable iterators: insert accepts
/// any type from which a value_type can be constructed.
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits>
  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                        _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
  {
    using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
                                     _Equal, _Hash, _RangeHash, _Unused,
                                     _RehashPolicy, _Traits>;
    using value_type = typename __base_type::value_type;
    using iterator = typename __base_type::iterator;
    using const_iterator = typename __base_type::const_iterator;

    using __unique_keys = typename __base_type::__unique_keys;
    using __hashtable = typename __base_type::__hashtable;
    using __ireturn_type = typename __base_type::__ireturn_type;

    using __base_type::insert;

    // SFINAE helpers: the forwarding overloads participate only when a
    // value_type is constructible from the argument.
    template<typename _Pair>
      using __is_cons = std::is_constructible<value_type, _Pair&&>;

    template<typename _Pair>
      using _IFcons = std::enable_if<__is_cons<_Pair>::value>;

    template<typename _Pair>
      using _IFconsp = typename _IFcons<_Pair>::type;

    // Forwarding insert: emplaces a value_type from __v.
    template<typename _Pair, typename = _IFconsp<_Pair>>
      __ireturn_type
      insert(_Pair&& __v)
      {
        __hashtable& __h = this->_M_conjure_hashtable();
        return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
      }

    template<typename _Pair, typename = _IFconsp<_Pair>>
      iterator
      insert(const_iterator __hint, _Pair&& __v)
      {
        __hashtable& __h = this->_M_conjure_hashtable();
        return __h._M_emplace(__hint, __unique_keys{},
                              std::forward<_Pair>(__v));
      }
  };
// Detection alias: names _Policy::__has_load_factor if it exists.  Used
// with __detected_or_t below to pick the right _Rehash_base.
template<typename _Policy>
  using __has_load_factor = typename _Policy::__has_load_factor;
/**
 *  Primary class template _Rehash_base.
 *
 *  Give hashtable the max_load_factor functions and reserve iff the
 *  rehash policy supports it.
 */
// The defaulted third parameter evaluates to the policy's nested
// __has_load_factor type if present, false_type otherwise.
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits,
         typename =
           __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
  struct _Rehash_base;
/// Specialization when rehash policy doesn't provide load factor management.
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits>
  struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
                      false_type /* Has load factor */>
  {
    // Intentionally empty: no max_load_factor()/reserve() members.
  };
  912. /// Specialization when rehash policy provide load factor management.
  913. template<typename _Key, typename _Value, typename _Alloc,
  914. typename _ExtractKey, typename _Equal,
  915. typename _Hash, typename _RangeHash, typename _Unused,
  916. typename _RehashPolicy, typename _Traits>
  917. struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  918. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
  919. true_type /* Has load factor */>
  920. {
  921. using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
  922. _Equal, _Hash, _RangeHash, _Unused,
  923. _RehashPolicy, _Traits>;
  924. float
  925. max_load_factor() const noexcept
  926. {
  927. const __hashtable* __this = static_cast<const __hashtable*>(this);
  928. return __this->__rehash_policy().max_load_factor();
  929. }
  930. void
  931. max_load_factor(float __z)
  932. {
  933. __hashtable* __this = static_cast<__hashtable*>(this);
  934. __this->__rehash_policy(_RehashPolicy(__z));
  935. }
  936. void
  937. reserve(std::size_t __n)
  938. {
  939. __hashtable* __this = static_cast<__hashtable*>(this);
  940. __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
  941. }
  942. };
  943. /**
  944. * Primary class template _Hashtable_ebo_helper.
  945. *
  946. * Helper class using EBO when it is not forbidden (the type is not
  947. * final) and when it is worth it (the type is empty.)
  948. */
  949. template<int _Nm, typename _Tp,
  950. bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
  951. struct _Hashtable_ebo_helper;
  952. /// Specialization using EBO.
  953. template<int _Nm, typename _Tp>
  954. struct _Hashtable_ebo_helper<_Nm, _Tp, true>
  955. : private _Tp
  956. {
  957. _Hashtable_ebo_helper() = default;
  958. template<typename _OtherTp>
  959. _Hashtable_ebo_helper(_OtherTp&& __tp)
  960. : _Tp(std::forward<_OtherTp>(__tp))
  961. { }
  962. const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
  963. _Tp& _M_get() { return static_cast<_Tp&>(*this); }
  964. };
  965. /// Specialization not using EBO.
  966. template<int _Nm, typename _Tp>
  967. struct _Hashtable_ebo_helper<_Nm, _Tp, false>
  968. {
  969. _Hashtable_ebo_helper() = default;
  970. template<typename _OtherTp>
  971. _Hashtable_ebo_helper(_OtherTp&& __tp)
  972. : _M_tp(std::forward<_OtherTp>(__tp))
  973. { }
  974. const _Tp& _M_cget() const { return _M_tp; }
  975. _Tp& _M_get() { return _M_tp; }
  976. private:
  977. _Tp _M_tp;
  978. };
/**
 *  Primary class template _Local_iterator_base.
 *
 *  Base class for local iterators, used to iterate within a bucket
 *  but not between buckets.
 */
// Specialized below on __cache_hash_code: with cached codes the iterator
// needs no hash functor; without them it must carry one.
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Hash, typename _RangeHash, typename _Unused,
         bool __cache_hash_code>
  struct _Local_iterator_base;
  989. /**
  990. * Primary class template _Hash_code_base.
  991. *
  992. * Encapsulates two policy issues that aren't quite orthogonal.
  993. * (1) the difference between using a ranged hash function and using
  994. * the combination of a hash function and a range-hashing function.
  995. * In the former case we don't have such things as hash codes, so
  996. * we have a dummy type as placeholder.
  997. * (2) Whether or not we cache hash codes. Caching hash codes is
  998. * meaningless if we have a ranged hash function.
  999. *
  1000. * We also put the key extraction objects here, for convenience.
  1001. * Each specialization derives from one or more of the template
* parameters to benefit from EBO (the empty base optimization).  This is important as this type
  1003. * is inherited in some cases by the _Local_iterator_base type used
  1004. * to implement local_iterator and const_local_iterator. As with
  1005. * any iterator type we prefer to make it as small as possible.
  1006. */
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Hash, typename _RangeHash, typename _Unused,
         bool __cache_hash_code>
  struct _Hash_code_base
  : private _Hashtable_ebo_helper<1, _Hash>
  {
  private:
    using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;

    // Gives the local iterator implementation access to _M_bucket_index().
    friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
                                       _Hash, _RangeHash, _Unused, false>;

  public:
    typedef _Hash hasher;

    // Return a copy of the stored hash functor.
    hasher
    hash_function() const
    { return _M_hash(); }

  protected:
    typedef std::size_t __hash_code;

    // We need the default constructor for the local iterators and _Hashtable
    // default constructor.
    _Hash_code_base() = default;

    _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }

    // Hash a key with the stored hash functor.
    __hash_code
    _M_hash_code(const _Key& __k) const
    {
      static_assert(__is_invocable<const _Hash&, const _Key&>{},
          "hash function must be invocable with an argument of key type");
      return _M_hash()(__k);
    }

    // Heterogeneous-lookup variant: hash a key-like _Kt directly.
    template<typename _Kt>
      __hash_code
      _M_hash_code_tr(const _Kt& __k) const
      {
        static_assert(__is_invocable<const _Hash&, const _Kt&>{},
            "hash function must be invocable with an argument of key type");
        return _M_hash()(__k);
      }

    // Map a hash code to a bucket index via the range-hashing function.
    std::size_t
    _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
    { return _RangeHash{}(__c, __bkt_count); }

    // Bucket index of a node without a cached code: re-hash its key.
    std::size_t
    _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
                    std::size_t __bkt_count) const
      noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
                && noexcept(declval<const _RangeHash&>()((__hash_code)0,
                                                         (std::size_t)0)) )
    {
      return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
                          __bkt_count);
    }

    // Bucket index of a node with a cached code: no re-hash needed.
    std::size_t
    _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
                    std::size_t __bkt_count) const
      noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
                                                      (std::size_t)0)) )
    { return _RangeHash{}(__n._M_hash_code, __bkt_count); }

    // Storing/copying a code is a no-op when nodes do not cache it...
    void
    _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
    { }

    void
    _M_copy_code(_Hash_node_code_cache<false>&,
                 const _Hash_node_code_cache<false>&) const
    { }

    // ...and updates the node's cached value when they do.
    void
    _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
    { __n._M_hash_code = __c; }

    void
    _M_copy_code(_Hash_node_code_cache<true>& __to,
                 const _Hash_node_code_cache<true>& __from) const
    { __to._M_hash_code = __from._M_hash_code; }

    void
    _M_swap(_Hash_code_base& __x)
    { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }

    const _Hash&
    _M_hash() const { return __ebo_hash::_M_cget(); }
  };
/// Partial specialization used when nodes contain a cached hash code.
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Hash, typename _RangeHash, typename _Unused>
  struct _Local_iterator_base<_Key, _Value, _ExtractKey,
                              _Hash, _RangeHash, _Unused, true>
  : public _Node_iterator_base<_Value, true>
  {
  protected:
    using __base_node_iter = _Node_iterator_base<_Value, true>;
    using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
                                             _Hash, _RangeHash, _Unused, true>;

    _Local_iterator_base() = default;

    // The hash-code-base argument is unused: each node's cached hash code
    // suffices to recompute bucket indexes.
    _Local_iterator_base(const __hash_code_base&,
                         _Hash_node<_Value, true>* __p,
                         std::size_t __bkt, std::size_t __bkt_count)
    : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
    { }

    // Advance within the bucket; becomes null (past-the-end) when the next
    // node hashes to a different bucket.
    void
    _M_incr()
    {
      __base_node_iter::_M_incr();
      if (this->_M_cur)
        {
          std::size_t __bkt
            = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
          if (__bkt != _M_bucket)
            this->_M_cur = nullptr;
        }
    }

    std::size_t _M_bucket;
    std::size_t _M_bucket_count;

  public:
    std::size_t
    _M_get_bucket() const { return _M_bucket; } // for debug mode
  };
// Uninitialized storage for a _Hash_code_base.
// This type is DefaultConstructible and Assignable even if the
// _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
// can be DefaultConstructible and Assignable.
template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
  struct _Hash_code_storage
  {
    __gnu_cxx::__aligned_buffer<_Tp> _M_storage;

    // Pointer to the buffer; the _Tp object is constructed there lazily
    // (via placement new) by the owning iterator.
    _Tp*
    _M_h() { return _M_storage._M_ptr(); }

    const _Tp*
    _M_h() const { return _M_storage._M_ptr(); }
  };
// Empty partial specialization for empty _Hash_code_base types: no buffer
// is needed, *this itself serves as the object's address.
template<typename _Tp>
  struct _Hash_code_storage<_Tp, true>
  {
    static_assert( std::is_empty<_Tp>::value, "Type must be empty" );

    // As _Tp is an empty type there will be no bytes written/read through
    // the cast pointer, so no strict-aliasing violation.
    _Tp*
    _M_h() { return reinterpret_cast<_Tp*>(this); }

    const _Tp*
    _M_h() const { return reinterpret_cast<const _Tp*>(this); }
  };
// Convenience alias: storage for the non-caching _Hash_code_base used by
// _Local_iterator_base<..., false> below.
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Hash, typename _RangeHash, typename _Unused>
  using __hash_code_for_local_iter
    = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
                                         _Hash, _RangeHash, _Unused, false>>;
// Partial specialization used when hash codes are not cached.  The local
// iterator must be able to recompute bucket indexes itself, so it keeps a
// copy of the _Hash_code_base in uninitialized storage (see
// _Hash_code_storage).  _M_bucket_count == size_t(-1) is the sentinel for
// "no hash object constructed" (default-constructed iterator).
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Hash, typename _RangeHash, typename _Unused>
  struct _Local_iterator_base<_Key, _Value, _ExtractKey,
                              _Hash, _RangeHash, _Unused, false>
  : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
                               _Unused>
  , _Node_iterator_base<_Value, false>
  {
  protected:
    using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
                                             _Hash, _RangeHash, _Unused, false>;
    using __node_iter_base = _Node_iterator_base<_Value, false>;

    _Local_iterator_base() : _M_bucket_count(-1) { }

    _Local_iterator_base(const __hash_code_base& __base,
                         _Hash_node<_Value, false>* __p,
                         std::size_t __bkt, std::size_t __bkt_count)
    : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
    { _M_init(__base); }

    // Destroy the stored hash object only if one was constructed.
    ~_Local_iterator_base()
    {
      if (_M_bucket_count != size_t(-1))
        _M_destroy();
    }

    _Local_iterator_base(const _Local_iterator_base& __iter)
    : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
    , _M_bucket_count(__iter._M_bucket_count)
    {
      if (_M_bucket_count != size_t(-1))
        _M_init(*__iter._M_h());
    }

    _Local_iterator_base&
    operator=(const _Local_iterator_base& __iter)
    {
      // Tear down our hash object (if any) before copying __iter's state.
      if (_M_bucket_count != -1)
        _M_destroy();
      this->_M_cur = __iter._M_cur;
      _M_bucket = __iter._M_bucket;
      _M_bucket_count = __iter._M_bucket_count;
      if (_M_bucket_count != -1)
        _M_init(*__iter._M_h());
      return *this;
    }

    // Advance within the bucket; becomes null (past-the-end) when the next
    // node belongs to a different bucket.
    void
    _M_incr()
    {
      __node_iter_base::_M_incr();
      if (this->_M_cur)
        {
          std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
                                                            _M_bucket_count);
          if (__bkt != _M_bucket)
            this->_M_cur = nullptr;
        }
    }

    std::size_t _M_bucket;
    std::size_t _M_bucket_count;

    // Placement-construct / destroy the stored hash object.
    void
    _M_init(const __hash_code_base& __base)
    { ::new(this->_M_h()) __hash_code_base(__base); }

    void
    _M_destroy() { this->_M_h()->~__hash_code_base(); }

  public:
    std::size_t
    _M_get_bucket() const { return _M_bucket; } // for debug mode
  };
/// local iterators
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Hash, typename _RangeHash, typename _Unused,
         bool __constant_iterators, bool __cache>
  struct _Local_iterator
  : public _Local_iterator_base<_Key, _Value, _ExtractKey,
                                _Hash, _RangeHash, _Unused, __cache>
  {
  private:
    using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
                                             _Hash, _RangeHash, _Unused,
                                             __cache>;
    using __hash_code_base = typename __base_type::__hash_code_base;

  public:
    typedef _Value value_type;
    // With constant iterators, even the mutable iterator only exposes
    // const access to the element.
    typedef typename std::conditional<__constant_iterators,
                                      const value_type*, value_type*>::type
      pointer;
    typedef typename std::conditional<__constant_iterators,
                                      const value_type&, value_type&>::type
      reference;
    typedef std::ptrdiff_t difference_type;
    typedef std::forward_iterator_tag iterator_category;

    _Local_iterator() = default;

    _Local_iterator(const __hash_code_base& __base,
                    _Hash_node<_Value, __cache>* __n,
                    std::size_t __bkt, std::size_t __bkt_count)
    : __base_type(__base, __n, __bkt, __bkt_count)
    { }

    reference
    operator*() const
    { return this->_M_cur->_M_v(); }

    pointer
    operator->() const
    { return this->_M_cur->_M_valptr(); }

    _Local_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Local_iterator
    operator++(int)
    {
      _Local_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
/// local const_iterators
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Hash, typename _RangeHash, typename _Unused,
         bool __constant_iterators, bool __cache>
  struct _Local_const_iterator
  : public _Local_iterator_base<_Key, _Value, _ExtractKey,
                                _Hash, _RangeHash, _Unused, __cache>
  {
  private:
    using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
                                             _Hash, _RangeHash, _Unused,
                                             __cache>;
    using __hash_code_base = typename __base_type::__hash_code_base;

  public:
    typedef _Value value_type;
    typedef const value_type* pointer;
    typedef const value_type& reference;
    typedef std::ptrdiff_t difference_type;
    typedef std::forward_iterator_tag iterator_category;

    _Local_const_iterator() = default;

    _Local_const_iterator(const __hash_code_base& __base,
                          _Hash_node<_Value, __cache>* __n,
                          std::size_t __bkt, std::size_t __bkt_count)
    : __base_type(__base, __n, __bkt, __bkt_count)
    { }

    // Implicit conversion from the mutable local iterator.
    _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
                                                _Hash, _RangeHash, _Unused,
                                                __constant_iterators,
                                                __cache>& __x)
    : __base_type(__x)
    { }

    reference
    operator*() const
    { return this->_M_cur->_M_v(); }

    pointer
    operator->() const
    { return this->_M_cur->_M_valptr(); }

    _Local_const_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Local_const_iterator
    operator++(int)
    {
      _Local_const_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
  1312. /**
  1313. * Primary class template _Hashtable_base.
  1314. *
  1315. * Helper class adding management of _Equal functor to
  1316. * _Hash_code_base type.
  1317. *
  1318. * Base class templates are:
  1319. * - __detail::_Hash_code_base
  1320. * - __detail::_Hashtable_ebo_helper
  1321. */
template<typename _Key, typename _Value, typename _ExtractKey,
         typename _Equal, typename _Hash, typename _RangeHash,
         typename _Unused, typename _Traits>
  struct _Hashtable_base
  : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
                           _Unused, _Traits::__hash_cached::value>,
    private _Hashtable_ebo_helper<0, _Equal>
  {
  public:
    typedef _Key key_type;
    typedef _Value value_type;
    typedef _Equal key_equal;
    typedef std::size_t size_type;
    typedef std::ptrdiff_t difference_type;

    using __traits_type = _Traits;
    using __hash_cached = typename __traits_type::__hash_cached;

    using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
                                             _Hash, _RangeHash, _Unused,
                                             __hash_cached::value>;

    using __hash_code = typename __hash_code_base::__hash_code;

  private:
    using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;

    // Without cached hash codes the code comparison is vacuously true;
    // only the key equality predicate decides.
    static bool
    _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
    { return true; }

    static bool
    _S_node_equals(const _Hash_node_code_cache<false>&,
                   const _Hash_node_code_cache<false>&)
    { return true; }

    // With cached codes, compare the codes first as a cheap filter.
    static bool
    _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
    { return __c == __n._M_hash_code; }

    static bool
    _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
                   const _Hash_node_code_cache<true>& __rhn)
    { return __lhn._M_hash_code == __rhn._M_hash_code; }

  protected:
    _Hashtable_base() = default;

    _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
    : __hash_code_base(__hash), _EqualEBO(__eq)
    { }

    // True iff node __n holds a key equal to __k (whose hash code is __c).
    bool
    _M_equals(const _Key& __k, __hash_code __c,
              const _Hash_node_value<_Value, __hash_cached::value>& __n) const
    {
      static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
        "key equality predicate must be invocable with two arguments of "
        "key type");
      return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
    }

    // Heterogeneous-lookup variant of _M_equals.
    template<typename _Kt>
      bool
      _M_equals_tr(const _Kt& __k, __hash_code __c,
                   const _Hash_node_value<_Value,
                                          __hash_cached::value>& __n) const
      {
        static_assert(
          __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
          "key equality predicate must be invocable with two arguments of "
          "key type");
        return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
      }

    // True iff the two nodes hold equal keys.
    bool
    _M_node_equals(
      const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
      const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
    {
      return _S_node_equals(__lhn, __rhn)
        && _M_eq()(_ExtractKey{}(__lhn._M_v()), _ExtractKey{}(__rhn._M_v()));
    }

    void
    _M_swap(_Hashtable_base& __x)
    {
      __hash_code_base::_M_swap(__x);
      std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
    }

    const _Equal&
    _M_eq() const { return _EqualEBO::_M_cget(); }
  };
/**
 *  Primary class template  _Equality.
 *
 *  This is for implementing equality comparison for unordered
 *  containers, per N3068, by John Lakos and Pablo Halpern.
 *  Algorithmically, we follow closely the reference implementations
 *  therein.
 */
// Specialized below on unique (map/set) vs equivalent (multimap/multiset)
// keys, per _Traits.
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits,
         bool _Unique_keys = _Traits::__unique_keys::value>
  struct _Equality;
/// unordered_map and unordered_set specializations.
template<typename _Key, typename _Value, typename _Alloc,
         typename _ExtractKey, typename _Equal,
         typename _Hash, typename _RangeHash, typename _Unused,
         typename _RehashPolicy, typename _Traits>
  struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                   _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
  {
    using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                                   _Hash, _RangeHash, _Unused,
                                   _RehashPolicy, _Traits>;

    // Equality of the two tables' element sets; defined out of line below.
    bool
    _M_equal(const __hashtable&) const;
  };
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      // _Equality is a base of the _Hashtable, so downcast to reach the
      // container's members.
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
	{
	  // The two containers may use different hash functions, so the
	  // bucket index for *__itx must be recomputed with __other's.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __prev_n = __other._M_buckets[__ybkt];
	  if (!__prev_n)
	    return false;

	  // Walk __other's bucket; the bucket entry points to the node
	  // *before* the first element of the bucket, hence _M_nxt.
	  for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
	       __n = __n->_M_next())
	    {
	      if (__n->_M_v() == *__itx)
		break;

	      // Fail when the node list ends or leaves bucket __ybkt
	      // without a match.
	      if (!__n->_M_nxt
		  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
		return false;
	    }
	}
      return true;
    }
  /// unordered_multiset and unordered_multimap specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // True iff *this (the derived _Hashtable) and the argument contain
      // the same multiset of elements.  Defined out-of-line below.
      bool
      _M_equal(const __hashtable&) const;
    };
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      // _Equality is a base of the _Hashtable, so downcast to reach the
      // container's members.
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
	{
	  // Measure the run [__itx, __itx_end) of elements whose keys
	  // compare equal to *__itx's key.
	  std::size_t __x_count = 1;
	  auto __itx_end = __itx;
	  for (++__itx_end; __itx_end != __this->end()
		 && __this->key_eq()(_ExtractKey{}(*__itx),
				     _ExtractKey{}(*__itx_end));
	       ++__itx_end)
	    ++__x_count;

	  // Locate the corresponding bucket in __other, using __other's
	  // hash function (the two containers' hashers may differ).
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __y_prev_n = __other._M_buckets[__ybkt];
	  if (!__y_prev_n)
	    return false;

	  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
	  for (;;)
	    {
	      if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
				   _ExtractKey{}(*__itx)))
		break;

	      // Skip the remainder of __y_n's equal-key run: advance past
	      // nodes comparing equal to the reference node __y_ref_n.
	      auto __y_ref_n = __y_n;
	      for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
		if (!__other._M_node_equals(*__y_ref_n, *__y_n))
		  break;

	      // Fail when the node list ended or left bucket __ybkt.
	      if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
		return false;
	    }

	  // __other must contain at least __x_count elements from __ity on.
	  typename __hashtable::const_iterator __ity(__y_n);
	  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
	    if (--__x_count == 0)
	      break;

	  if (__x_count != 0)
	    return false;

	  // Both runs must hold the same multiset of elements.
	  if (!std::is_permutation(__itx, __itx_end, __ity))
	    return false;

	  __itx = __itx_end;
	}
      return true;
    }
  1525. /**
  1526. * This type deals with all allocation and keeps an allocator instance
  1527. * through inheritance to benefit from EBO when possible.
  1528. */
  1529. template<typename _NodeAlloc>
  1530. struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
  1531. {
  1532. private:
  1533. using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;
  1534. public:
  1535. using __node_type = typename _NodeAlloc::value_type;
  1536. using __node_alloc_type = _NodeAlloc;
  1537. // Use __gnu_cxx to benefit from _S_always_equal and al.
  1538. using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;
  1539. using __value_alloc_traits = typename __node_alloc_traits::template
  1540. rebind_traits<typename __node_type::value_type>;
  1541. using __node_ptr = __node_type*;
  1542. using __node_base = _Hash_node_base;
  1543. using __node_base_ptr = __node_base*;
  1544. using __buckets_alloc_type =
  1545. __alloc_rebind<__node_alloc_type, __node_base_ptr>;
  1546. using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
  1547. using __buckets_ptr = __node_base_ptr*;
  1548. _Hashtable_alloc() = default;
  1549. _Hashtable_alloc(const _Hashtable_alloc&) = default;
  1550. _Hashtable_alloc(_Hashtable_alloc&&) = default;
  1551. template<typename _Alloc>
  1552. _Hashtable_alloc(_Alloc&& __a)
  1553. : __ebo_node_alloc(std::forward<_Alloc>(__a))
  1554. { }
  1555. __node_alloc_type&
  1556. _M_node_allocator()
  1557. { return __ebo_node_alloc::_M_get(); }
  1558. const __node_alloc_type&
  1559. _M_node_allocator() const
  1560. { return __ebo_node_alloc::_M_cget(); }
  1561. // Allocate a node and construct an element within it.
  1562. template<typename... _Args>
  1563. __node_ptr
  1564. _M_allocate_node(_Args&&... __args);
  1565. // Destroy the element within a node and deallocate the node.
  1566. void
  1567. _M_deallocate_node(__node_ptr __n);
  1568. // Deallocate a node.
  1569. void
  1570. _M_deallocate_node_ptr(__node_ptr __n);
  1571. // Deallocate the linked list of nodes pointed to by __n.
  1572. // The elements within the nodes are destroyed.
  1573. void
  1574. _M_deallocate_nodes(__node_ptr __n);
  1575. __buckets_ptr
  1576. _M_allocate_buckets(std::size_t __bkt_count);
  1577. void
  1578. _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
  1579. };
  // Definitions of class template _Hashtable_alloc's out-of-line member
  // functions.
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
	auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
	__node_ptr __n = std::__to_address(__nptr);
	__try
	  {
	    // Construct the node header first, then the element through
	    // the allocator so allocator-aware construction is honored.
	    ::new ((void*)__n) __node_type;
	    __node_alloc_traits::construct(_M_node_allocator(),
					   __n->_M_valptr(),
					   std::forward<_Args>(__args)...);
	    return __n;
	  }
	__catch(...)
	  {
	    // Element construction failed: return the memory.  No explicit
	    // node-header destructor call is made here — presumably the
	    // header is trivially destructible; confirm against __node_type.
	    __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
	    __throw_exception_again;
	  }
      }
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      // Destroy the stored element, then release the node itself.
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
    {
      // Recover the allocator's (possibly fancy) pointer type from the
      // raw pointer before handing the memory back to the allocator.
      typedef typename __node_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
      // Destroy the node header; the element was destroyed by the caller.
      __n->~__node_type();
      __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
    }
  1620. template<typename _NodeAlloc>
  1621. void
  1622. _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
  1623. {
  1624. while (__n)
  1625. {
  1626. __node_ptr __tmp = __n;
  1627. __n = __n->_M_next();
  1628. _M_deallocate_node(__tmp);
  1629. }
  1630. }
  1631. template<typename _NodeAlloc>
  1632. auto
  1633. _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
  1634. -> __buckets_ptr
  1635. {
  1636. __buckets_alloc_type __alloc(_M_node_allocator());
  1637. auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
  1638. __buckets_ptr __p = std::__to_address(__ptr);
  1639. __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
  1640. return __p;
  1641. }
  1642. template<typename _NodeAlloc>
  1643. void
  1644. _Hashtable_alloc<_NodeAlloc>::
  1645. _M_deallocate_buckets(__buckets_ptr __bkts,
  1646. std::size_t __bkt_count)
  1647. {
  1648. typedef typename __buckets_alloc_traits::pointer _Ptr;
  1649. auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
  1650. __buckets_alloc_type __alloc(_M_node_allocator());
  1651. __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
  1652. }
  1653. ///@} hashtable-detail
  1654. } // namespace __detail
  1655. _GLIBCXX_END_NAMESPACE_VERSION
  1656. } // namespace std
  1657. #endif // _HASHTABLE_POLICY_H