// bitmap_allocator.h
// Bitmap Allocator. -*- C++ -*-

// Copyright (C) 2004-2023 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file ext/bitmap_allocator.h
 *  This file is a GNU extension to the Standard C++ Library.
 */
#ifndef _BITMAP_ALLOCATOR_H
#define _BITMAP_ALLOCATOR_H 1

#include <bits/requires_hosted.h> // GNU extensions are currently omitted

#include <utility>            // For std::pair.
#include <new>                // For operator new.

#include <bits/functexcept.h> // For __throw_bad_alloc().
#include <bits/stl_function.h> // For greater_equal, and less_equal.
#include <bits/move.h>
#include <debug/debug.h>      // _GLIBCXX_DEBUG_ASSERT
#include <ext/concurrence.h>
/** @brief The constant in the expression below is the alignment
 *  required in bytes.
 */
  36. #define _BALLOC_ALIGN_BYTES 8
  37. namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
  38. {
  39. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  40. namespace __detail
  41. {
  /** @class  __mini_vector bitmap_allocator.h bitmap_allocator.h
   *
   *  @brief  __mini_vector<> is a stripped down version of the
   *  full-fledged std::vector<>.
   *
   *  It is to be used only for built-in types or PODs. Notable
   *  differences are:
   *
   *  1. Not all accessor functions are present.
   *  2. Used ONLY for PODs.
   *  3. No Allocator template argument. Uses ::operator new() to get
   *  memory, and ::operator delete() to free it.
   *  Caveat: The dtor does NOT free the memory allocated, so this a
   *  memory-leaking vector!
   */
  template<typename _Tp>
    class __mini_vector
    {
      // Non-copyable: declared private and never defined (the
      // pre-C++11 idiom for '= delete').
      __mini_vector(const __mini_vector&);
      __mini_vector& operator=(const __mini_vector&);

    public:
      typedef _Tp value_type;
      typedef _Tp* pointer;
      typedef _Tp& reference;
      typedef const _Tp& const_reference;
      typedef std::size_t size_type;
      typedef std::ptrdiff_t difference_type;
      typedef pointer iterator;

    private:
      pointer _M_start;           // First element.
      pointer _M_finish;          // One past the last element.
      pointer _M_end_of_storage;  // One past the allocated storage.

      // Number of elements that can still be appended without growing.
      size_type
      _M_space_left() const throw()
      { return _M_end_of_storage - _M_finish; }

      // Raw storage for __n elements; elements are NOT constructed
      // (PODs only, so that is fine).
      _GLIBCXX_NODISCARD pointer
      allocate(size_type __n)
      { return static_cast<pointer>(::operator new(__n * sizeof(_Tp))); }

      // Release raw storage; the size argument is ignored.
      void
      deallocate(pointer __p, size_type)
      { ::operator delete(__p); }

    public:
      // Members used: size(), push_back(), pop_back(),
      // insert(iterator, const_reference), erase(iterator),
      // begin(), end(), back(), operator[].

      __mini_vector()
      : _M_start(0), _M_finish(0), _M_end_of_storage(0) { }

      size_type
      size() const throw()
      { return _M_finish - _M_start; }

      iterator
      begin() const throw()
      { return this->_M_start; }

      iterator
      end() const throw()
      { return this->_M_finish; }

      // Precondition: !empty().
      reference
      back() const throw()
      { return *(this->end() - 1); }

      // No bounds checking.
      reference
      operator[](const size_type __pos) const throw()
      { return this->_M_start[__pos]; }

      // Insert __x before __pos, growing the storage if needed
      // (defined out of line).
      void
      insert(iterator __pos, const_reference __x);

      void
      push_back(const_reference __x)
      {
        if (this->_M_space_left())
          {
            *this->end() = __x;
            ++this->_M_finish;
          }
        else
          this->insert(this->end(), __x);
      }

      // Precondition: !empty().  No destructor runs (PODs only).
      void
      pop_back() throw()
      { --this->_M_finish; }

      // Remove the element at __pos (defined out of line).
      void
      erase(iterator __pos) throw();

      // Drop all elements; the allocation itself is kept.
      void
      clear() throw()
      { this->_M_finish = this->_M_start; }
    };
  // Out of line function definitions.

  // Insert __x before __pos.  Grows geometrically when out of space;
  // note that the old storage is deliberately NOT freed by the dtor,
  // only replaced here.
  template<typename _Tp>
    void __mini_vector<_Tp>::
    insert(iterator __pos, const_reference __x)
    {
      if (this->_M_space_left())
        {
          // Enough capacity: shift [__pos, _M_finish) one slot to the
          // right, back to front, then drop __x into the gap.
          size_type __to_move = this->_M_finish - __pos;
          iterator __dest = this->end();
          iterator __src = this->end() - 1;

          ++this->_M_finish;
          while (__to_move)
            {
              *__dest = *__src;
              --__dest; --__src; --__to_move;
            }
          *__pos = __x;
        }
      else
        {
          // Grow: double the capacity (1 when currently empty).
          size_type __new_size = this->size() ? this->size() * 2 : 1;
          iterator __new_start = this->allocate(__new_size);
          iterator __first = this->begin();
          iterator __start = __new_start;

          // Copy the prefix [begin(), __pos) ...
          while (__first != __pos)
            {
              *__start = *__first;
              ++__start; ++__first;
            }

          // ... place the new element ...
          *__start = __x;
          ++__start;

          // ... then copy the suffix [__pos, end()).
          while (__first != this->end())
            {
              *__start = *__first;
              ++__start; ++__first;
            }

          if (this->_M_start)
            this->deallocate(this->_M_start, this->size());

          this->_M_start = __new_start;
          this->_M_finish = __start;
          this->_M_end_of_storage = this->_M_start + __new_size;
        }
    }
  169. template<typename _Tp>
  170. void __mini_vector<_Tp>::
  171. erase(iterator __pos) throw()
  172. {
  173. while (__pos + 1 != this->end())
  174. {
  175. *__pos = __pos[1];
  176. ++__pos;
  177. }
  178. --this->_M_finish;
  179. }
  // Minimal iterator-traits helper for __lower_bound below: the
  // primary template reads the nested typedefs of an iterator
  // class ...
  template<typename _Tp>
    struct __mv_iter_traits
    {
      typedef typename _Tp::value_type value_type;
      typedef typename _Tp::difference_type difference_type;
    };

  // ... and this partial specialization handles plain pointers.
  template<typename _Tp>
    struct __mv_iter_traits<_Tp*>
    {
      typedef _Tp value_type;
      typedef std::ptrdiff_t difference_type;
    };
  enum
    {
      bits_per_byte = 8,
      // Each bitmap word is a std::size_t and tracks this many blocks.
      bits_per_block = sizeof(std::size_t) * std::size_t(bits_per_byte)
    };
  // Binary search: return the first position in [__first, __last)
  // before which __val could be inserted while keeping the range
  // partitioned with respect to __comp (a local re-implementation of
  // std::lower_bound).
  template<typename _ForwardIterator, typename _Tp, typename _Compare>
    _ForwardIterator
    __lower_bound(_ForwardIterator __first, _ForwardIterator __last,
                  const _Tp& __val, _Compare __comp)
    {
      typedef typename __mv_iter_traits<_ForwardIterator>::difference_type
        _DistanceType;

      _DistanceType __len = __last - __first;
      _DistanceType __half;
      _ForwardIterator __middle;

      // Classic halving loop: keep whichever half of the range can
      // still contain the answer.
      while (__len > 0)
        {
          __half = __len >> 1;
          __middle = __first;
          __middle += __half;
          if (__comp(*__middle, __val))
            {
              // *__middle is ordered before __val: the answer lies
              // strictly to the right of __middle.
              __first = __middle;
              ++__first;
              __len = __len - __half - 1;
            }
          else
            __len = __half;
        }
      return __first;
    }
  /** @brief  The number of Blocks pointed to by the address pair
   *  passed to the function.
   */
  template<typename _AddrPair>
    inline std::size_t
    __num_blocks(_AddrPair __ap)
    {
      // Both ends of the pair are inclusive, hence the extra one.
      const std::size_t __span = __ap.second - __ap.first;
      return __span + 1;
    }
  /** @brief  The number of Bit-maps pointed to by the address pair
   *  passed to the function.
   */
  template<typename _AddrPair>
    inline std::size_t
    __num_bitmaps(_AddrPair __ap)
    {
      // One bitmap word covers bits_per_block blocks; the division
      // truncates (callers are expected to pass whole-word ranges —
      // not enforced here).
      return __num_blocks(__ap) / std::size_t(bits_per_block);
    }
  // _Tp should be a pointer type.
  //
  // Predicate: true when the address given at construction lies
  // inside a block-pair's inclusive [first, second] range.
  template<typename _Tp>
    class _Inclusive_between
    {
      typedef _Tp pointer;
      pointer _M_ptr_value;
      typedef typename std::pair<_Tp, _Tp> _Block_pair;

    public:
      // Remember the address whose containing block we look for.
      _Inclusive_between(pointer __ptr) : _M_ptr_value(__ptr)
      { }

      bool
      operator()(_Block_pair __bp) const throw()
      {
        // Both bounds are inclusive; use the function objects rather
        // than raw <= / >= so that comparing unrelated pointers has
        // defined behavior.
        return std::greater_equal<pointer>()(_M_ptr_value, __bp.first)
          && std::less_equal<pointer>()(_M_ptr_value, __bp.second);
      }
    };
  // Used to pass a Functor to functions by reference.
  //
  // Thin wrapper holding a reference to a functor; copies of the
  // wrapper all forward calls to the same underlying object, so any
  // state the functor accumulates is preserved.
  template<typename _Functor>
    class _Functor_Ref
    {
      _Functor& _M_fref;  // The referenced functor; never owned.

    public:
      typedef typename _Functor::argument_type argument_type;
      typedef typename _Functor::result_type result_type;

      // Bind to the functor that all calls will be forwarded to.
      _Functor_Ref(_Functor& __fref) : _M_fref(__fref)
      { }

      // Forward the call (and its result) to the referenced functor.
      result_type
      operator()(argument_type __arg)
      {
        _Functor& __target = _M_fref;
        return __target(__arg);
      }
    };
  /** @class  _Ffit_finder bitmap_allocator.h bitmap_allocator.h
   *
   *  @brief  The class which acts as a predicate for applying the
   *  first-fit memory allocation policy for the bitmap allocator.
   */
  // _Tp should be a pointer type, and _Alloc is the Allocator for
  // the vector.
  template<typename _Tp>
    class _Ffit_finder
    {
      typedef std::pair<_Tp, _Tp> _Block_pair;
      typedef __detail::__mini_vector<_Block_pair> _BPVector;
      typedef typename _BPVector::difference_type _Counter_type;

      std::size_t* _M_pbitmap;      // Bitmap word with a free bit, once found.
      _Counter_type _M_data_offset; // Index of that word within the headers.

    public:
      typedef bool result_type;
      typedef _Block_pair argument_type;

      _Ffit_finder() : _M_pbitmap(0), _M_data_offset(0)
      { }

      // Returns true iff the block-pair __bp has at least one free
      // slot, remembering in _M_pbitmap/_M_data_offset where it was
      // found.
      bool
      operator()(_Block_pair __bp) throw()
      {
        using std::size_t;
        // Set the _rover to the last physical location bitmap,
        // which is the bitmap which belongs to the first free
        // block. Thus, the bitmaps are in exact reverse order of
        // the actual memory layout. So, we count down the bitmaps,
        // which is the same as moving up the memory.

        // If the used count stored at the start of the Bit Map headers
        // is equal to the number of Objects that the current Block can
        // store, then there is definitely no space for another single
        // object, so just return false.
        _Counter_type __diff = __detail::__num_bitmaps(__bp);

        if (*(reinterpret_cast<size_t*>
              (__bp.first) - (__diff + 1)) == __detail::__num_blocks(__bp))
          return false;

        size_t* __rover = reinterpret_cast<size_t*>(__bp.first) - 1;

        for (_Counter_type __i = 0; __i < __diff; ++__i)
          {
            _M_data_offset = __i;
            if (*__rover)
              {
                // A non-zero word contains at least one 1-bit, i.e.
                // at least one free block.
                _M_pbitmap = __rover;
                return true;
              }
            --__rover;
          }
        return false;
      }

      std::size_t*
      _M_get() const throw()
      { return _M_pbitmap; }

      // Offset of the found word expressed in blocks (not words).
      _Counter_type
      _M_offset() const throw()
      { return _M_data_offset * std::size_t(bits_per_block); }
    };
  /** @class  _Bitmap_counter bitmap_allocator.h bitmap_allocator.h
   *
   *  @brief  The bitmap counter which acts as the bitmap
   *  manipulator, and manages the bit-manipulation functions and
   *  the searching and identification functions on the bit-map.
   */
  // _Tp should be a pointer type.
  template<typename _Tp>
    class _Bitmap_counter
    {
      typedef typename
      __detail::__mini_vector<typename std::pair<_Tp, _Tp> > _BPVector;
      typedef typename _BPVector::size_type _Index_type;
      typedef _Tp pointer;

      _BPVector& _M_vbp;                   // Vector of all block-pairs.
      std::size_t* _M_curr_bmap;           // Bitmap word the cursor is at.
      std::size_t* _M_last_bmap_in_block;  // Final bitmap word of this block.
      _Index_type _M_curr_index;           // Index of the block-pair in _M_vbp.

    public:
      // Use the 2nd parameter with care. Make sure that such an
      // entry exists in the vector before passing that particular
      // index to this ctor.
      _Bitmap_counter(_BPVector& Rvbp, long __index = -1) : _M_vbp(Rvbp)
      { this->_M_reset(__index); }

      void
      _M_reset(long __index = -1) throw()
      {
        if (__index == -1)
          {
            // Sentinel state: _M_finished() will report true.
            _M_curr_bmap = 0;
            _M_curr_index = static_cast<_Index_type>(-1);
            return;
          }

        _M_curr_index = __index;
        // The bitmap words sit immediately before the data blocks,
        // stored in reverse order; the first one is at (first - 1).
        _M_curr_bmap = reinterpret_cast<std::size_t*>
          (_M_vbp[_M_curr_index].first) - 1;

        _GLIBCXX_DEBUG_ASSERT(__index <= (long)_M_vbp.size() - 1);

        _M_last_bmap_in_block = _M_curr_bmap
          - ((_M_vbp[_M_curr_index].second
              - _M_vbp[_M_curr_index].first + 1)
             / std::size_t(bits_per_block) - 1);
      }

      // Dangerous Function! Use with extreme care. Pass to this
      // function ONLY those values that are known to be correct,
      // otherwise this will mess up big time.
      void
      _M_set_internal_bitmap(std::size_t* __new_internal_marker) throw()
      { _M_curr_bmap = __new_internal_marker; }

      bool
      _M_finished() const throw()
      { return(_M_curr_bmap == 0); }

      // Advance to the next bitmap word, moving on to the next
      // block-pair once the current one is exhausted (and into the
      // finished state past the last pair).
      _Bitmap_counter&
      operator++() throw()
      {
        if (_M_curr_bmap == _M_last_bmap_in_block)
          {
            if (++_M_curr_index == _M_vbp.size())
              _M_curr_bmap = 0;
            else
              this->_M_reset(_M_curr_index);
          }
        else
          --_M_curr_bmap;
        return *this;
      }

      std::size_t*
      _M_get() const throw()
      { return _M_curr_bmap; }

      pointer
      _M_base() const throw()
      { return _M_vbp[_M_curr_index].first; }

      // Offset (in blocks) represented by the current bitmap word.
      _Index_type
      _M_offset() const throw()
      {
        return std::size_t(bits_per_block)
          * ((reinterpret_cast<std::size_t*>(this->_M_base())
              - _M_curr_bmap) - 1);
      }

      _Index_type
      _M_where() const throw()
      { return _M_curr_index; }
    };
  /** @brief  Mark a memory address as allocated by re-setting the
   *  corresponding bit in the bit-map.
   *
   *  @param __pbmap  The bitmap word to modify.
   *  @param __pos    Bit index within that word; may be as large as
   *                  bits_per_block - 1.
   */
  inline void
  __bit_allocate(std::size_t* __pbmap, std::size_t __pos) throw()
  {
    // Shift a size_t-wide one: the previous `1 << __pos` shifted a
    // plain int, which is undefined behavior for __pos >= 31 on
    // LP64 targets where a bitmap word holds 64 bits.
    std::size_t __mask = static_cast<std::size_t>(1) << __pos;
    __mask = ~__mask;
    *__pbmap &= __mask;
  }
  /** @brief  Mark a memory address as free by setting the
   *  corresponding bit in the bit-map.
   *
   *  @param __pbmap  The bitmap word to modify.
   *  @param __pos    Bit index within that word; may be as large as
   *                  bits_per_block - 1.
   */
  inline void
  __bit_free(std::size_t* __pbmap, std::size_t __pos) throw()
  {
    // Shift a size_t-wide one: the previous `1 << __pos` shifted a
    // plain int, which is undefined behavior for __pos >= 31 on
    // LP64 targets where a bitmap word holds 64 bits.
    std::size_t __mask = static_cast<std::size_t>(1) << __pos;
    *__pbmap |= __mask;
  }
  429. } // namespace __detail
  /** @brief  Generic Version of the bsf instruction.
   *
   *  Returns the index of the lowest set bit of __num, which must be
   *  non-zero.
   */
  inline std::size_t
  _Bit_scan_forward(std::size_t __num)
  {
    const int __trailing_zeros = __builtin_ctzl(__num);
    return static_cast<std::size_t>(__trailing_zeros);
  }
  /** @class  free_list bitmap_allocator.h bitmap_allocator.h
   *
   *  @brief  The free list class for managing chunks of memory to be
   *  given to and returned by the bitmap_allocator.
   *
   *  Each stored chunk begins with a size_t holding its size, so the
   *  value_type is a pointer to that leading word.
   */
  class free_list
  {
  public:
    typedef std::size_t*                          value_type;
    typedef __detail::__mini_vector<value_type>   vector_type;
    typedef vector_type::iterator                 iterator;
    typedef __mutex                               __mutex_type;

  private:
    // Orders chunks by their leading size word, so the free list
    // stays sorted by chunk size.
    struct _LT_pointer_compare
    {
      bool
      operator()(const std::size_t* __pui,
                 const std::size_t __cui) const throw()
      { return *__pui < __cui; }
    };

#if defined __GTHREADS
    // Lazily-constructed mutex taken while mutating the free list.
    __mutex_type&
    _M_get_mutex()
    {
      static __mutex_type _S_mutex;
      return _S_mutex;
    }
#endif

    // Lazily-constructed singleton vector of free chunks.
    vector_type&
    _M_get_free_list()
    {
      static vector_type _S_free_list;
      return _S_free_list;
    }

    /** @brief  Performs validation of memory based on their size.
     *
     *  @param  __addr The pointer to the memory block to be
     *  validated.
     *
     *  Validates the memory block passed to this function and
     *  appropriately performs the action of managing the free list of
     *  blocks by adding this block to the free list or deleting this
     *  or larger blocks from the free list.
     */
    void
    _M_validate(std::size_t* __addr) throw()
    {
      vector_type& __free_list = _M_get_free_list();
      // Cap the number of cached chunks at 64.
      const vector_type::size_type __max_size = 64;
      if (__free_list.size() >= __max_size)
        {
          // Ok, the threshold value has been reached.  We determine
          // which block to remove from the list of free blocks.
          if (*__addr >= *__free_list.back())
            {
              // Ok, the new block is greater than or equal to the
              // last block in the list of free blocks. We just free
              // the new block.
              ::operator delete(static_cast<void*>(__addr));
              return;
            }
          else
            {
              // Deallocate the last block in the list of free lists,
              // and insert the new one in its correct position.
              ::operator delete(static_cast<void*>(__free_list.back()));
              __free_list.pop_back();
            }
        }

      // Just add the block to the list of free lists unconditionally.
      iterator __temp = __detail::__lower_bound
        (__free_list.begin(), __free_list.end(),
         *__addr, _LT_pointer_compare());

      // We may insert the new free list before _temp;
      __free_list.insert(__temp, __addr);
    }

    /** @brief  Decides whether the wastage of memory is acceptable for
     *  the current memory request and returns accordingly.
     *
     *  @param __block_size The size of the block available in the free
     *  list.
     *
     *  @param __required_size The required size of the memory block.
     *
     *  @return true if the wastage incurred is acceptable, else returns
     *  false.
     */
    bool
    _M_should_i_give(std::size_t __block_size,
                     std::size_t __required_size) throw()
    {
      // Hand out a cached chunk only when less than 36% of it would
      // be wasted on this request.
      const std::size_t __max_wastage_percentage = 36;
      if (__block_size >= __required_size &&
          (((__block_size - __required_size) * 100 / __block_size)
           < __max_wastage_percentage))
        return true;
      else
        return false;
    }

  public:
    /** @brief This function returns the block of memory to the
     *  internal free list.
     *
     *  @param  __addr The pointer to the memory block that was given
     *  by a call to the _M_get function.
     */
    inline void
    _M_insert(std::size_t* __addr) throw()
    {
#if defined __GTHREADS
      __scoped_lock __bfl_lock(_M_get_mutex());
#endif
      // Call _M_validate to decide what should be done with
      // this particular free list.
      this->_M_validate(reinterpret_cast<std::size_t*>(__addr) - 1);
      // See discussion as to why this is 1!
    }

    /** @brief This function gets a block of memory of the specified
     *  size from the free list.
     *
     *  @param  __sz The size in bytes of the memory required.
     *
     *  @return  A pointer to the new memory block of size at least
     *  equal to that requested.
     */
    std::size_t*
    _M_get(std::size_t __sz) _GLIBCXX_THROW(std::bad_alloc);

    /** @brief This function just clears the internal Free List, and
     *  gives back all the memory to the OS.
     */
    void
    _M_clear();
  };
  // Forward declare the class.
  template<typename _Tp>
    class bitmap_allocator;

  // Specialize for void:
  template<>
    class bitmap_allocator<void>
    {
    public:
      typedef void* pointer;
      typedef const void* const_pointer;

      // Reference-to-void members are impossible.
      typedef void value_type;

      // Standard allocator rebind mechanism, so containers can obtain
      // an allocator for a different element type.
      template<typename _Tp1>
        struct rebind
        {
          typedef bitmap_allocator<_Tp1> other;
        };
    };
  586. /**
  587. * @brief Bitmap Allocator, primary template.
  588. * @ingroup allocators
  589. */
  590. template<typename _Tp>
  591. class bitmap_allocator : private free_list
  592. {
  593. public:
  594. typedef std::size_t size_type;
  595. typedef std::ptrdiff_t difference_type;
  596. typedef _Tp* pointer;
  597. typedef const _Tp* const_pointer;
  598. typedef _Tp& reference;
  599. typedef const _Tp& const_reference;
  600. typedef _Tp value_type;
  601. typedef free_list::__mutex_type __mutex_type;
  602. template<typename _Tp1>
  603. struct rebind
  604. {
  605. typedef bitmap_allocator<_Tp1> other;
  606. };
  607. #if __cplusplus >= 201103L
  608. // _GLIBCXX_RESOLVE_LIB_DEFECTS
  609. // 2103. propagate_on_container_move_assignment
  610. typedef std::true_type propagate_on_container_move_assignment;
  611. #endif
    private:
      // Round _BSize up to the next multiple of _AlignSize at
      // compile time.
      template<std::size_t _BSize, std::size_t _AlignSize>
        struct aligned_size
        {
          enum
            {
              modulus = _BSize % _AlignSize,
              value = _BSize + (modulus ? _AlignSize - (modulus) : 0)
            };
        };

      // One allocation unit: sizeof(value_type) rounded up to the
      // _BALLOC_ALIGN_BYTES boundary.
      struct _Alloc_block
      {
        char __M_unused[aligned_size<sizeof(value_type),
                                     _BALLOC_ALIGN_BYTES>::value];
      };

      // An inclusive [first, last] pair of blocks forming one chunk.
      typedef typename std::pair<_Alloc_block*, _Alloc_block*> _Block_pair;

      typedef typename __detail::__mini_vector<_Block_pair> _BPVector;
      typedef typename _BPVector::iterator _BPiter;
  630. template<typename _Predicate>
  631. static _BPiter
  632. _S_find(_Predicate __p)
  633. {
  634. _BPiter __first = _S_mem_blocks.begin();
  635. while (__first != _S_mem_blocks.end() && !__p(*__first))
  636. ++__first;
  637. return __first;
  638. }
#if defined _GLIBCXX_DEBUG
      // Complexity: O(lg(N)). Where, N is the number of block of size
      // sizeof(value_type).
      //
      // Debug-only invariant check: assert that no tracked memory
      // block still has a free slot (used before refilling the pool).
      void
      _S_check_for_free_blocks() throw()
      {
        typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
        _BPiter __bpi = _S_find(_FFF());

        _GLIBCXX_DEBUG_ASSERT(__bpi == _S_mem_blocks.end());
      }
#endif
      /** @brief  Responsible for exponentially growing the internal
       *  memory pool.
       *
       *  @throw  std::bad_alloc. If memory cannot be allocated.
       *
       *  Complexity: O(1), but internally depends upon the
       *  complexity of the function free_list::_M_get. The part where
       *  the bitmap headers are written has complexity: O(X),where X
       *  is the number of blocks of size sizeof(value_type) within
       *  the newly acquired block. Having a tight bound.
       */
      void
      _S_refill_pool() _GLIBCXX_THROW(std::bad_alloc)
      {
        using std::size_t;
#if defined _GLIBCXX_DEBUG
        _S_check_for_free_blocks();
#endif

        // Chunk layout: [use count][bitmap words...][data blocks...].
        const size_t __num_bitmaps = (_S_block_size
                                      / size_t(__detail::bits_per_block));
        const size_t __size_to_allocate = sizeof(size_t)
          + _S_block_size * sizeof(_Alloc_block)
          + __num_bitmaps * sizeof(size_t);

        size_t* __temp =
          reinterpret_cast<size_t*>(this->_M_get(__size_to_allocate));
        // Zero the in-use counter that heads the chunk, then step
        // past it to the bitmap words.
        *__temp = 0;
        ++__temp;

        // The Header information goes at the Beginning of the Block.
        _Block_pair __bp =
          std::make_pair(reinterpret_cast<_Alloc_block*>
                         (__temp + __num_bitmaps),
                         reinterpret_cast<_Alloc_block*>
                         (__temp + __num_bitmaps)
                         + _S_block_size - 1);

        // Fill the Vector with this information.
        _S_mem_blocks.push_back(__bp);

        for (size_t __i = 0; __i < __num_bitmaps; ++__i)
          __temp[__i] = ~static_cast<size_t>(0); // 1 Indicates all Free.

        // Double the request size for the next refill.
        _S_block_size *= 2;
      }
      // Shared pool state for all bitmap_allocator<_Tp> instances.
      static _BPVector _S_mem_blocks;    // All live memory chunks.
      static std::size_t _S_block_size;  // Blocks in the next chunk;
                                         // doubled on each refill.
      static __detail::_Bitmap_counter<_Alloc_block*> _S_last_request;
      static typename _BPVector::size_type _S_last_dealloc_index;
#if defined __GTHREADS
      // Guards the static pool state (taken in the allocate and
      // deallocate paths).
      static __mutex_type _S_mut;
#endif
  697. public:
      /** @brief  Allocates memory for a single object of size
       *  sizeof(_Tp).
       *
       *  @throw  std::bad_alloc. If memory cannot be allocated.
       *
       *  Complexity: Worst case complexity is O(N), but that
       *  is hardly ever hit. If and when this particular case is
       *  encountered, the next few cases are guaranteed to have a
       *  worst case complexity of O(1)!  That's why this function
       *  performs very well on average. You can consider this
       *  function to have a complexity referred to commonly as:
       *  Amortized Constant time.
       */
      pointer
      _M_allocate_single_object() _GLIBCXX_THROW(std::bad_alloc)
      {
        using std::size_t;
#if defined __GTHREADS
        __scoped_lock __bit_lock(_S_mut);
#endif

        // The algorithm is something like this: The last_request
        // variable points to the last accessed Bit Map. When such a
        // condition occurs, we try to find a free block in the
        // current bitmap, or succeeding bitmaps until the last bitmap
        // is reached. If no free block turns up, we resort to First
        // Fit method.

        // WARNING: Do not re-order the condition in the while
        // statement below, because it relies on C++'s short-circuit
        // evaluation. The return from _S_last_request->_M_get() will
        // NOT be dereference able if _S_last_request->_M_finished()
        // returns true. This would inevitably lead to a NULL pointer
        // dereference if tinkered with.
        while (_S_last_request._M_finished() == false
               && (*(_S_last_request._M_get()) == 0))
          _S_last_request.operator++();

        if (__builtin_expect(_S_last_request._M_finished() == true, false))
          {
            // Fall Back to First Fit algorithm.
            typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
            _FFF __fff;
            _BPiter __bpi = _S_find(__detail::_Functor_Ref<_FFF>(__fff));

            if (__bpi != _S_mem_blocks.end())
              {
                // Search was successful. Ok, now mark the first bit from
                // the right as 0, meaning Allocated. This bit is obtained
                // by calling _M_get() on __fff.
                size_t __nz_bit = _Bit_scan_forward(*__fff._M_get());
                __detail::__bit_allocate(__fff._M_get(), __nz_bit);

                _S_last_request._M_reset(__bpi - _S_mem_blocks.begin());

                // Now, get the address of the bit we marked as allocated.
                pointer __ret = reinterpret_cast<pointer>
                  (__bpi->first + __fff._M_offset() + __nz_bit);
                // The use count sits just before the bitmap words of
                // this chunk; bump it for the new allocation.
                size_t* __puse_count =
                  reinterpret_cast<size_t*>
                  (__bpi->first) - (__detail::__num_bitmaps(*__bpi) + 1);

                ++(*__puse_count);
                return __ret;
              }
            else
              {
                // Search was unsuccessful. We Add more memory to the
                // pool by calling _S_refill_pool().
                _S_refill_pool();

                // _M_Reset the _S_last_request structure to the first
                // free block's bit map.
                _S_last_request._M_reset(_S_mem_blocks.size() - 1);

                // Now, mark that bit as allocated.
              }
          }

        // _S_last_request holds a pointer to a valid bit map, that
        // points to a free block in memory.
        size_t __nz_bit = _Bit_scan_forward(*_S_last_request._M_get());
        __detail::__bit_allocate(_S_last_request._M_get(), __nz_bit);

        pointer __ret = reinterpret_cast<pointer>
          (_S_last_request._M_base() + _S_last_request._M_offset() + __nz_bit);

        // Bump the chunk's use count (stored before its bitmap words).
        size_t* __puse_count = reinterpret_cast<size_t*>
          (_S_mem_blocks[_S_last_request._M_where()].first)
          - (__detail::
             __num_bitmaps(_S_mem_blocks[_S_last_request._M_where()]) + 1);

        ++(*__puse_count);
        return __ret;
      }
      /** @brief  Deallocates memory that belongs to a single object of
       *  size sizeof(_Tp).
       *
       *  @param  __p  Pointer previously returned by
       *  _M_allocate_single_object(); must not be null.
       *
       *  Complexity: O(lg(N)), but the worst case is not hit
       *  often!  This is because containers usually deallocate memory
       *  close to each other and this case is handled in O(1) time by
       *  the deallocate function.
       */
      void
      _M_deallocate_single_object(pointer __p) throw()
      {
	using std::size_t;
#if defined __GTHREADS
	// All bookkeeping below touches static members shared by every
	// instance, so serialize against concurrent (de)allocations.
	__scoped_lock __bit_lock(_S_mut);
#endif
	// Reinterpret as the pool's block type so that pointer
	// differences below are measured in _Alloc_block units.
	_Alloc_block* __real_p = reinterpret_cast<_Alloc_block*>(__p);

	typedef typename _BPVector::iterator _Iterator;
	typedef typename _BPVector::difference_type _Difference_type;

	// __diff: index into _S_mem_blocks of the block containing __p.
	// __displacement: offset (in _Alloc_blocks) of __p within it.
	_Difference_type __diff;
	long __displacement;

	_GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);

	// Fast path: containers tend to free addresses near each other,
	// so first test the block that served the previous deallocation.
	__detail::_Inclusive_between<_Alloc_block*> __ibt(__real_p);
	if (__ibt(_S_mem_blocks[_S_last_dealloc_index]))
	  {
	    _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index
				  <= _S_mem_blocks.size() - 1);

	    // Initial Assumption was correct!
	    __diff = _S_last_dealloc_index;
	    __displacement = __real_p - _S_mem_blocks[__diff].first;
	  }
	else
	  {
	    // Slow path: search _S_mem_blocks for the owning block
	    // (this is the O(lg(N)) case) and refresh the cache.
	    _Iterator _iter = _S_find(__ibt);
	    _GLIBCXX_DEBUG_ASSERT(_iter != _S_mem_blocks.end());
	    __diff = _iter - _S_mem_blocks.begin();
	    __displacement = __real_p - _S_mem_blocks[__diff].first;
	    _S_last_dealloc_index = __diff;
	  }

	// Get the position of the iterator that has been found.
	// Bit position of __p within its bitmap word.
	const size_t __rotate = (__displacement
				 % size_t(__detail::bits_per_block));
	// The bitmap words are laid out immediately before the block's
	// first _Alloc_block, growing downwards; step back to the word
	// that covers __p and clear its bit.
	size_t* __bitmapC =
	  reinterpret_cast<size_t*>
	  (_S_mem_blocks[__diff].first) - 1;
	__bitmapC -= (__displacement / size_t(__detail::bits_per_block));
	__detail::__bit_free(__bitmapC, __rotate);

	// The in-use counter sits one word before all of the block's
	// bitmaps; decrement it for this freed object.
	size_t* __puse_count = reinterpret_cast<size_t*>
	  (_S_mem_blocks[__diff].first)
	  - (__detail::__num_bitmaps(_S_mem_blocks[__diff]) + 1);

	_GLIBCXX_DEBUG_ASSERT(*__puse_count != 0);

	--(*__puse_count);

	// If the block is now completely empty, return it to the free
	// list and drop it from the vector of active blocks.
	if (__builtin_expect(*__puse_count == 0, false))
	  {
	    // Shrink the growth policy back down (it presumably grows
	    // in _S_refill_pool — not visible here; TODO confirm).
	    _S_block_size /= 2;

	    // We can safely remove this block.
	    // _Block_pair __bp = _S_mem_blocks[__diff];
	    this->_M_insert(__puse_count);
	    _S_mem_blocks.erase(_S_mem_blocks.begin() + __diff);

	    // Reset the _S_last_request variable to reflect the
	    // erased block. We do this to protect future requests
	    // after the last block has been removed from a particular
	    // memory Chunk, which in turn has been returned to the
	    // free list, and hence had been erased from the vector,
	    // so the size of the vector gets reduced by 1.
	    // NOTE: __diff-- is a post-decrement — __diff is lowered
	    // unconditionally so the code below refers to the
	    // preceding block (or -1 if the first block was erased).
	    if ((_Difference_type)_S_last_request._M_where() >= __diff--)
	      _S_last_request._M_reset(__diff);

	    // If the Index into the vector of the region of memory
	    // that might hold the next address that will be passed to
	    // deallocated may have been invalidated due to the above
	    // erase procedure being called on the vector, hence we
	    // try to restore this invariant too.
	    if (_S_last_dealloc_index >= _S_mem_blocks.size())
	      {
		_S_last_dealloc_index =(__diff != -1 ? __diff : 0);
		_GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);
	      }
	  }
      }
  858. public:
      // All special members are trivial: no per-instance state is
      // declared in this view — the pool bookkeeping lives entirely in
      // static members shared by every bitmap_allocator<_Tp> instance.
      bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
      { }

      bitmap_allocator(const bitmap_allocator&) _GLIBCXX_USE_NOEXCEPT
      { }

      // Converting constructor: allocators for different value types
      // are freely interconvertible (required for rebinding).
      template<typename _Tp1>
	bitmap_allocator(const bitmap_allocator<_Tp1>&) _GLIBCXX_USE_NOEXCEPT
	{ }

      ~bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
      { }
  868. _GLIBCXX_NODISCARD pointer
  869. allocate(size_type __n)
  870. {
  871. if (__n > this->max_size())
  872. std::__throw_bad_alloc();
  873. #if __cpp_aligned_new
  874. if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
  875. {
  876. const size_type __b = __n * sizeof(value_type);
  877. std::align_val_t __al = std::align_val_t(alignof(value_type));
  878. return static_cast<pointer>(::operator new(__b, __al));
  879. }
  880. #endif
  881. if (__builtin_expect(__n == 1, true))
  882. return this->_M_allocate_single_object();
  883. else
  884. {
  885. const size_type __b = __n * sizeof(value_type);
  886. return reinterpret_cast<pointer>(::operator new(__b));
  887. }
  888. }
      // Hinted overload required by the Allocator interface: the
      // locality hint is ignored; forward to the unhinted allocate.
      _GLIBCXX_NODISCARD pointer
      allocate(size_type __n, typename bitmap_allocator<void>::const_pointer)
      { return allocate(__n); }
  892. void
  893. deallocate(pointer __p, size_type __n) throw()
  894. {
  895. if (__builtin_expect(__p != 0, true))
  896. {
  897. #if __cpp_aligned_new
  898. // Types with extended alignment are handled by operator delete.
  899. if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
  900. {
  901. ::operator delete(__p, std::align_val_t(alignof(value_type)));
  902. return;
  903. }
  904. #endif
  905. if (__builtin_expect(__n == 1, true))
  906. this->_M_deallocate_single_object(__p);
  907. else
  908. ::operator delete(__p);
  909. }
  910. }
      // Return the true address of __r, even if value_type overloads
      // unary operator& (std::__addressof bypasses such overloads).
      pointer
      address(reference __r) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__r); }

      const_pointer
      address(const_reference __r) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__r); }
  917. size_type
  918. max_size() const _GLIBCXX_USE_NOEXCEPT
  919. { return size_type(-1) / sizeof(value_type); }
#if __cplusplus >= 201103L
      // C++11 and later: construct any type in place, perfect-forwarding
      // the constructor arguments, and destroy by explicit destructor call.
      template<typename _Up, typename... _Args>
	void
	construct(_Up* __p, _Args&&... __args)
	{ ::new((void *)__p) _Up(std::forward<_Args>(__args)...); }

      template<typename _Up>
	void
	destroy(_Up* __p)
	{ __p->~_Up(); }
#else
      // Pre-C++11: copy-construct a value_type into raw storage, and
      // destroy by explicit destructor call.
      void
      construct(pointer __p, const_reference __data)
      { ::new((void *)__p) value_type(__data); }

      void
      destroy(pointer __p)
      { __p->~value_type(); }
#endif
  937. };
  // Every bitmap_allocator instance draws from the same static pool,
  // so any two allocators are interchangeable: equality is
  // unconditionally true.
  template<typename _Tp1, typename _Tp2>
    bool
    operator==(const bitmap_allocator<_Tp1>&,
	       const bitmap_allocator<_Tp2>&) throw()
    { return true; }
#if __cpp_impl_three_way_comparison < 201907L
  // Under C++20's comparison rules operator!= is synthesized from
  // operator==, so this overload is only provided when that language
  // feature is unavailable.
  template<typename _Tp1, typename _Tp2>
    bool
    operator!=(const bitmap_allocator<_Tp1>&,
	       const bitmap_allocator<_Tp2>&) throw()
    { return false; }
#endif
  // Static member definitions.
  // This state is shared by all bitmap_allocator<_Tp> instances of a
  // given _Tp, which is why the allocator objects themselves are empty.

  // Vector of memory-block descriptors currently owned by the pool.
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::_BPVector
    bitmap_allocator<_Tp>::_S_mem_blocks;

  // Size of the next block to request; starts at two bitmap-words'
  // worth of objects and is halved when an empty block is released
  // (see _M_deallocate_single_object).
  template<typename _Tp>
    std::size_t bitmap_allocator<_Tp>::_S_block_size
    = 2 * std::size_t(__detail::bits_per_block);

  // Cached index of the block that served the most recent
  // deallocation; checked first on the next deallocate (O(1) fast path).
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::_BPVector::size_type
    bitmap_allocator<_Tp>::_S_last_dealloc_index = 0;

  // Bitmap cursor remembering where the last allocation succeeded, so
  // the next single-object search can resume from there.
  template<typename _Tp>
    __detail::_Bitmap_counter
    <typename bitmap_allocator<_Tp>::_Alloc_block*>
    bitmap_allocator<_Tp>::_S_last_request(_S_mem_blocks);

#if defined __GTHREADS
  // Mutex serializing access to all of the above when threads are
  // enabled.
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::__mutex_type
    bitmap_allocator<_Tp>::_S_mut;
#endif
  969. _GLIBCXX_END_NAMESPACE_VERSION
  970. } // namespace __gnu_cxx
  971. #endif