bitmap_allocator.h

// Bitmap Allocator. -*- C++ -*-

// Copyright (C) 2004-2018 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
//
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file ext/bitmap_allocator.h
 *  This file is a GNU extension to the Standard C++ Library.
 */

#ifndef _BITMAP_ALLOCATOR_H
#define _BITMAP_ALLOCATOR_H 1

#include <utility>            // For std::pair.
#include <bits/functexcept.h> // For __throw_bad_alloc().
#include <functional>         // For greater_equal, and less_equal.
#include <new>                // For operator new.
#include <debug/debug.h>      // _GLIBCXX_DEBUG_ASSERT
#include <ext/concurrence.h>
#include <bits/move.h>

/** @brief The constant in the expression below is the alignment
 *  required in bytes.
 */
#define _BALLOC_ALIGN_BYTES 8

namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  using std::size_t;
  using std::ptrdiff_t;

  namespace __detail
  {
    /** @class  __mini_vector bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  __mini_vector<> is a stripped down version of the
     *  full-fledged std::vector<>.
     *
     *  It is to be used only for built-in types or PODs. Notable
     *  differences are:
     *
     *  1. Not all accessor functions are present.
     *  2. Used ONLY for PODs.
     *  3. No Allocator template argument. Uses ::operator new() to get
     *     memory, and ::operator delete() to free it.
     *  Caveat: The dtor does NOT free the memory allocated, so this is a
     *  memory-leaking vector!
     */
    template<typename _Tp>
      class __mini_vector
      {
        __mini_vector(const __mini_vector&);
        __mini_vector& operator=(const __mini_vector&);

      public:
        typedef _Tp value_type;
        typedef _Tp* pointer;
        typedef _Tp& reference;
        typedef const _Tp& const_reference;
        typedef size_t size_type;
        typedef ptrdiff_t difference_type;
        typedef pointer iterator;

      private:
        pointer _M_start;
        pointer _M_finish;
        pointer _M_end_of_storage;

        size_type
        _M_space_left() const throw()
        { return _M_end_of_storage - _M_finish; }

        pointer
        allocate(size_type __n)
        { return static_cast<pointer>(::operator new(__n * sizeof(_Tp))); }

        void
        deallocate(pointer __p, size_type)
        { ::operator delete(__p); }

      public:
        // Members used: size(), push_back(), pop_back(),
        // insert(iterator, const_reference), erase(iterator),
        // begin(), end(), back(), operator[].

        __mini_vector()
        : _M_start(0), _M_finish(0), _M_end_of_storage(0) { }

        size_type
        size() const throw()
        { return _M_finish - _M_start; }

        iterator
        begin() const throw()
        { return this->_M_start; }

        iterator
        end() const throw()
        { return this->_M_finish; }

        reference
        back() const throw()
        { return *(this->end() - 1); }

        reference
        operator[](const size_type __pos) const throw()
        { return this->_M_start[__pos]; }

        void
        insert(iterator __pos, const_reference __x);

        void
        push_back(const_reference __x)
        {
          if (this->_M_space_left())
            {
              *this->end() = __x;
              ++this->_M_finish;
            }
          else
            this->insert(this->end(), __x);
        }

        void
        pop_back() throw()
        { --this->_M_finish; }

        void
        erase(iterator __pos) throw();

        void
        clear() throw()
        { this->_M_finish = this->_M_start; }
      };

    // Out of line function definitions.
    template<typename _Tp>
      void __mini_vector<_Tp>::
      insert(iterator __pos, const_reference __x)
      {
        if (this->_M_space_left())
          {
            size_type __to_move = this->_M_finish - __pos;
            iterator __dest = this->end();
            iterator __src = this->end() - 1;

            ++this->_M_finish;
            while (__to_move)
              {
                *__dest = *__src;
                --__dest; --__src; --__to_move;
              }
            *__pos = __x;
          }
        else
          {
            size_type __new_size = this->size() ? this->size() * 2 : 1;
            iterator __new_start = this->allocate(__new_size);
            iterator __first = this->begin();
            iterator __start = __new_start;

            while (__first != __pos)
              {
                *__start = *__first;
                ++__start; ++__first;
              }
            *__start = __x;
            ++__start;

            while (__first != this->end())
              {
                *__start = *__first;
                ++__start; ++__first;
              }

            if (this->_M_start)
              this->deallocate(this->_M_start, this->size());

            this->_M_start = __new_start;
            this->_M_finish = __start;
            this->_M_end_of_storage = this->_M_start + __new_size;
          }
      }

    template<typename _Tp>
      void __mini_vector<_Tp>::
      erase(iterator __pos) throw()
      {
        while (__pos + 1 != this->end())
          {
            *__pos = __pos[1];
            ++__pos;
          }
        --this->_M_finish;
      }

    template<typename _Tp>
      struct __mv_iter_traits
      {
        typedef typename _Tp::value_type value_type;
        typedef typename _Tp::difference_type difference_type;
      };

    template<typename _Tp>
      struct __mv_iter_traits<_Tp*>
      {
        typedef _Tp value_type;
        typedef ptrdiff_t difference_type;
      };

    enum
      {
        bits_per_byte = 8,
        bits_per_block = sizeof(size_t) * size_t(bits_per_byte)
      };

    template<typename _ForwardIterator, typename _Tp, typename _Compare>
      _ForwardIterator
      __lower_bound(_ForwardIterator __first, _ForwardIterator __last,
                    const _Tp& __val, _Compare __comp)
      {
        typedef typename __mv_iter_traits<_ForwardIterator>::difference_type
          _DistanceType;

        _DistanceType __len = __last - __first;
        _DistanceType __half;
        _ForwardIterator __middle;

        while (__len > 0)
          {
            __half = __len >> 1;
            __middle = __first;
            __middle += __half;
            if (__comp(*__middle, __val))
              {
                __first = __middle;
                ++__first;
                __len = __len - __half - 1;
              }
            else
              __len = __half;
          }
        return __first;
      }

    /** @brief The number of Blocks pointed to by the address pair
     *  passed to the function.
     */
    template<typename _AddrPair>
      inline size_t
      __num_blocks(_AddrPair __ap)
      { return (__ap.second - __ap.first) + 1; }

    /** @brief The number of Bit-maps pointed to by the address pair
     *  passed to the function.
     */
    template<typename _AddrPair>
      inline size_t
      __num_bitmaps(_AddrPair __ap)
      { return __num_blocks(__ap) / size_t(bits_per_block); }
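
    // A worked example of the two helpers above (illustrative, not part
    // of the original header): with sizeof(size_t) == 8, bits_per_block
    // is 64, so for an address pair __ap spanning 128 _Alloc_blocks
    //   __num_blocks(__ap)  == 128
    //   __num_bitmaps(__ap) == 128 / 64 == 2
    // i.e. two size_t words of bitmap describe that block pair.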

    // _Tp should be a pointer type.
    template<typename _Tp>
      class _Inclusive_between
      : public std::unary_function<typename std::pair<_Tp, _Tp>, bool>
      {
        typedef _Tp pointer;
        pointer _M_ptr_value;
        typedef typename std::pair<_Tp, _Tp> _Block_pair;

      public:
        _Inclusive_between(pointer __ptr) : _M_ptr_value(__ptr)
        { }

        bool
        operator()(_Block_pair __bp) const throw()
        {
          if (std::less_equal<pointer>()(_M_ptr_value, __bp.second)
              && std::greater_equal<pointer>()(_M_ptr_value, __bp.first))
            return true;
          else
            return false;
        }
      };

    // Used to pass a Functor to functions by reference.
    template<typename _Functor>
      class _Functor_Ref
      : public std::unary_function<typename _Functor::argument_type,
                                   typename _Functor::result_type>
      {
        _Functor& _M_fref;

      public:
        typedef typename _Functor::argument_type argument_type;
        typedef typename _Functor::result_type result_type;

        _Functor_Ref(_Functor& __fref) : _M_fref(__fref)
        { }

        result_type
        operator()(argument_type __arg)
        { return _M_fref(__arg); }
      };

    /** @class  _Ffit_finder bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  The class which acts as a predicate for applying the
     *  first-fit memory allocation policy for the bitmap allocator.
     */
    // _Tp should be a pointer type, and _Alloc is the Allocator for
    // the vector.
    template<typename _Tp>
      class _Ffit_finder
      : public std::unary_function<typename std::pair<_Tp, _Tp>, bool>
      {
        typedef typename std::pair<_Tp, _Tp> _Block_pair;
        typedef typename __detail::__mini_vector<_Block_pair> _BPVector;
        typedef typename _BPVector::difference_type _Counter_type;

        size_t* _M_pbitmap;
        _Counter_type _M_data_offset;

      public:
        _Ffit_finder() : _M_pbitmap(0), _M_data_offset(0)
        { }

        bool
        operator()(_Block_pair __bp) throw()
        {
          // Set __rover to the last physical location bitmap, which is
          // the bitmap that belongs to the first free block. Thus, the
          // bitmaps are in exact reverse order of the actual memory
          // layout. So, we count down the bitmaps, which is the same as
          // moving up the memory.

          // If the used count stored at the start of the Bit Map headers
          // is equal to the number of Objects that the current Block can
          // store, then there is definitely no space for another single
          // object, so just return false.
          _Counter_type __diff = __detail::__num_bitmaps(__bp);

          if (*(reinterpret_cast<size_t*>
                (__bp.first) - (__diff + 1)) == __detail::__num_blocks(__bp))
            return false;

          size_t* __rover = reinterpret_cast<size_t*>(__bp.first) - 1;

          for (_Counter_type __i = 0; __i < __diff; ++__i)
            {
              _M_data_offset = __i;
              if (*__rover)
                {
                  _M_pbitmap = __rover;
                  return true;
                }
              --__rover;
            }
          return false;
        }

        size_t*
        _M_get() const throw()
        { return _M_pbitmap; }

        _Counter_type
        _M_offset() const throw()
        { return _M_data_offset * size_t(bits_per_block); }
      };

    /** @class  _Bitmap_counter bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  The bitmap counter which acts as the bitmap
     *  manipulator, and manages the bit-manipulation functions and
     *  the searching and identification functions on the bit-map.
     */
    // _Tp should be a pointer type.
    template<typename _Tp>
      class _Bitmap_counter
      {
        typedef typename
        __detail::__mini_vector<typename std::pair<_Tp, _Tp> > _BPVector;
        typedef typename _BPVector::size_type _Index_type;
        typedef _Tp pointer;

        _BPVector& _M_vbp;
        size_t* _M_curr_bmap;
        size_t* _M_last_bmap_in_block;
        _Index_type _M_curr_index;

      public:
        // Use the 2nd parameter with care. Make sure that such an
        // entry exists in the vector before passing that particular
        // index to this ctor.
        _Bitmap_counter(_BPVector& Rvbp, long __index = -1) : _M_vbp(Rvbp)
        { this->_M_reset(__index); }

        void
        _M_reset(long __index = -1) throw()
        {
          if (__index == -1)
            {
              _M_curr_bmap = 0;
              _M_curr_index = static_cast<_Index_type>(-1);
              return;
            }

          _M_curr_index = __index;
          _M_curr_bmap = reinterpret_cast<size_t*>
            (_M_vbp[_M_curr_index].first) - 1;

          _GLIBCXX_DEBUG_ASSERT(__index <= (long)_M_vbp.size() - 1);

          _M_last_bmap_in_block = _M_curr_bmap
            - ((_M_vbp[_M_curr_index].second
                - _M_vbp[_M_curr_index].first + 1)
               / size_t(bits_per_block) - 1);
        }

        // Dangerous Function! Use with extreme care. Pass to this
        // function ONLY those values that are known to be correct,
        // otherwise this will mess up big time.
        void
        _M_set_internal_bitmap(size_t* __new_internal_marker) throw()
        { _M_curr_bmap = __new_internal_marker; }

        bool
        _M_finished() const throw()
        { return (_M_curr_bmap == 0); }

        _Bitmap_counter&
        operator++() throw()
        {
          if (_M_curr_bmap == _M_last_bmap_in_block)
            {
              if (++_M_curr_index == _M_vbp.size())
                _M_curr_bmap = 0;
              else
                this->_M_reset(_M_curr_index);
            }
          else
            --_M_curr_bmap;
          return *this;
        }

        size_t*
        _M_get() const throw()
        { return _M_curr_bmap; }

        pointer
        _M_base() const throw()
        { return _M_vbp[_M_curr_index].first; }

        _Index_type
        _M_offset() const throw()
        {
          return size_t(bits_per_block)
            * ((reinterpret_cast<size_t*>(this->_M_base())
                - _M_curr_bmap) - 1);
        }

        _Index_type
        _M_where() const throw()
        { return _M_curr_index; }
      };

    /** @brief  Mark a memory address as allocated by re-setting the
     *  corresponding bit in the bit-map.
     */
    inline void
    __bit_allocate(size_t* __pbmap, size_t __pos) throw()
    {
      // Shift a size_t-wide 1 so that positions >= 32 stay well defined
      // when size_t is 64 bits wide.
      size_t __mask = size_t(1) << __pos;
      __mask = ~__mask;
      *__pbmap &= __mask;
    }

    /** @brief  Mark a memory address as free by setting the
     *  corresponding bit in the bit-map.
     */
    inline void
    __bit_free(size_t* __pbmap, size_t __pos) throw()
    {
      size_t __mask = size_t(1) << __pos;
      *__pbmap |= __mask;
    }
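
    // A short worked example of the two helpers above (illustrative
    // only; assumes a fully free bitmap word to start from):
    //
    //   size_t __bmap = ~static_cast<size_t>(0); // every bit 1: all free
    //   __bit_allocate(&__bmap, 3);              // clear bit 3: in use
    //   __bit_free(&__bmap, 3);                  // set bit 3: free again
    //
    // A set bit therefore always means "this block is available".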

  } // namespace __detail

  /** @brief  Generic Version of the bsf instruction.
   */
  inline size_t
  _Bit_scan_forward(size_t __num)
  { return static_cast<size_t>(__builtin_ctzl(__num)); }

  /** @class  free_list bitmap_allocator.h bitmap_allocator.h
   *
   *  @brief  The free list class for managing chunks of memory to be
   *  given to and returned by the bitmap_allocator.
   */
  class free_list
  {
  public:
    typedef size_t* value_type;
    typedef __detail::__mini_vector<value_type> vector_type;
    typedef vector_type::iterator iterator;
    typedef __mutex __mutex_type;

  private:
    struct _LT_pointer_compare
    {
      bool
      operator()(const size_t* __pui,
                 const size_t __cui) const throw()
      { return *__pui < __cui; }
    };

#if defined __GTHREADS
    __mutex_type&
    _M_get_mutex()
    {
      static __mutex_type _S_mutex;
      return _S_mutex;
    }
#endif

    vector_type&
    _M_get_free_list()
    {
      static vector_type _S_free_list;
      return _S_free_list;
    }

    /** @brief  Performs validation of memory based on its size.
     *
     *  @param  __addr The pointer to the memory block to be
     *  validated.
     *
     *  Validates the memory block passed to this function and
     *  appropriately performs the action of managing the free list of
     *  blocks by adding this block to the free list or deleting this
     *  or larger blocks from the free list.
     */
    void
    _M_validate(size_t* __addr) throw()
    {
      vector_type& __free_list = _M_get_free_list();
      const vector_type::size_type __max_size = 64;
      if (__free_list.size() >= __max_size)
        {
          // Ok, the threshold value has been reached. We determine
          // which block to remove from the list of free blocks.
          if (*__addr >= *__free_list.back())
            {
              // Ok, the new block is greater than or equal to the
              // last block in the list of free blocks. We just free
              // the new block.
              ::operator delete(static_cast<void*>(__addr));
              return;
            }
          else
            {
              // Deallocate the last block in the list of free blocks,
              // and insert the new one in its correct position.
              ::operator delete(static_cast<void*>(__free_list.back()));
              __free_list.pop_back();
            }
        }

      // Just add the block to the list of free blocks unconditionally.
      iterator __temp = __detail::__lower_bound
        (__free_list.begin(), __free_list.end(),
         *__addr, _LT_pointer_compare());

      // We may insert the new block before __temp.
      __free_list.insert(__temp, __addr);
    }

    /** @brief  Decides whether the wastage of memory is acceptable for
     *  the current memory request and returns accordingly.
     *
     *  @param  __block_size The size of the block available in the free
     *  list.
     *
     *  @param  __required_size The required size of the memory block.
     *
     *  @return  true if the wastage incurred is acceptable, else returns
     *  false.
     */
    bool
    _M_should_i_give(size_t __block_size,
                     size_t __required_size) throw()
    {
      const size_t __max_wastage_percentage = 36;
      if (__block_size >= __required_size
          && (((__block_size - __required_size) * 100 / __block_size)
              < __max_wastage_percentage))
        return true;
      else
        return false;
    }
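
    // A worked example of the wastage test above (illustrative only):
    // for __block_size == 160 and __required_size == 128 the wastage is
    // (160 - 128) * 100 / 160 == 20, below the 36% threshold, so the
    // block is handed out; a 512-byte block for the same request would
    // waste 75% and be rejected.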

  public:
    /** @brief  This function returns the block of memory to the
     *  internal free list.
     *
     *  @param  __addr The pointer to the memory block that was given
     *  by a call to the _M_get function.
     */
    inline void
    _M_insert(size_t* __addr) throw()
    {
#if defined __GTHREADS
      __scoped_lock __bfl_lock(_M_get_mutex());
#endif
      // Call _M_validate to decide what should be done with
      // this particular block.
      this->_M_validate(reinterpret_cast<size_t*>(__addr) - 1);
      // The - 1 steps back to the size_t word in which _M_get recorded
      // the size of the block, just before the address it handed out.
    }

    /** @brief  This function gets a block of memory of the specified
     *  size from the free list.
     *
     *  @param  __sz The size in bytes of the memory required.
     *
     *  @return  A pointer to the new memory block of size at least
     *  equal to that requested.
     */
    size_t*
    _M_get(size_t __sz) _GLIBCXX_THROW(std::bad_alloc);

    /** @brief  This function just clears the internal Free List, and
     *  gives back all the memory to the OS.
     */
    void
    _M_clear();
  };

  // Forward declare the class.
  template<typename _Tp>
    class bitmap_allocator;

  // Specialize for void:
  template<>
    class bitmap_allocator<void>
    {
    public:
      typedef void* pointer;
      typedef const void* const_pointer;

      // Reference-to-void members are impossible.
      typedef void value_type;

      template<typename _Tp1>
        struct rebind
        {
          typedef bitmap_allocator<_Tp1> other;
        };
    };

  /**
   *  @brief Bitmap Allocator, primary template.
   *  @ingroup allocators
   */
  template<typename _Tp>
    class bitmap_allocator : private free_list
    {
    public:
      typedef size_t size_type;
      typedef ptrdiff_t difference_type;
      typedef _Tp* pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp& reference;
      typedef const _Tp& const_reference;
      typedef _Tp value_type;
      typedef free_list::__mutex_type __mutex_type;

      template<typename _Tp1>
        struct rebind
        {
          typedef bitmap_allocator<_Tp1> other;
        };

#if __cplusplus >= 201103L
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 2103. propagate_on_container_move_assignment
      typedef std::true_type propagate_on_container_move_assignment;
#endif

    private:
      template<size_t _BSize, size_t _AlignSize>
        struct aligned_size
        {
          enum
            {
              modulus = _BSize % _AlignSize,
              value = _BSize + (modulus ? _AlignSize - (modulus) : 0)
            };
        };
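
      // For example (illustrative only): with _BALLOC_ALIGN_BYTES == 8,
      // aligned_size<1, 8>::value == 8 and aligned_size<12, 8>::value == 16,
      // so every _Alloc_block below occupies a multiple of 8 bytes.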

      struct _Alloc_block
      {
        char __M_unused[aligned_size<sizeof(value_type),
                                     _BALLOC_ALIGN_BYTES>::value];
      };

      typedef typename std::pair<_Alloc_block*, _Alloc_block*> _Block_pair;

      typedef typename __detail::__mini_vector<_Block_pair> _BPVector;
      typedef typename _BPVector::iterator _BPiter;

      template<typename _Predicate>
        static _BPiter
        _S_find(_Predicate __p)
        {
          _BPiter __first = _S_mem_blocks.begin();
          while (__first != _S_mem_blocks.end() && !__p(*__first))
            ++__first;
          return __first;
        }

#if defined _GLIBCXX_DEBUG
      // Complexity: O(lg(N)), where N is the number of blocks of size
      // sizeof(value_type).
      void
      _S_check_for_free_blocks() throw()
      {
        typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
        _BPiter __bpi = _S_find(_FFF());

        _GLIBCXX_DEBUG_ASSERT(__bpi == _S_mem_blocks.end());
      }
#endif

      /** @brief  Responsible for exponentially growing the internal
       *  memory pool.
       *
       *  @throw  std::bad_alloc. If memory can not be allocated.
       *
       *  Complexity: O(1), but internally depends upon the
       *  complexity of the function free_list::_M_get. The part where
       *  the bitmap headers are written has complexity: O(X), where X
       *  is the number of blocks of size sizeof(value_type) within
       *  the newly acquired block. This is a tight bound.
       */
      void
      _S_refill_pool() _GLIBCXX_THROW(std::bad_alloc)
      {
#if defined _GLIBCXX_DEBUG
        _S_check_for_free_blocks();
#endif

        const size_t __num_bitmaps = (_S_block_size
                                      / size_t(__detail::bits_per_block));
        const size_t __size_to_allocate = sizeof(size_t)
          + _S_block_size * sizeof(_Alloc_block)
          + __num_bitmaps * sizeof(size_t);

        size_t* __temp =
          reinterpret_cast<size_t*>(this->_M_get(__size_to_allocate));
        *__temp = 0;
        ++__temp;

        // The Header information goes at the Beginning of the Block.
        _Block_pair __bp =
          std::make_pair(reinterpret_cast<_Alloc_block*>
                         (__temp + __num_bitmaps),
                         reinterpret_cast<_Alloc_block*>
                         (__temp + __num_bitmaps)
                         + _S_block_size - 1);

        // Fill the Vector with this information.
        _S_mem_blocks.push_back(__bp);

        for (size_t __i = 0; __i < __num_bitmaps; ++__i)
          __temp[__i] = ~static_cast<size_t>(0); // 1 Indicates all Free.

        _S_block_size *= 2;
      }
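
      // Rough layout of one super-block produced above (a sketch; the
      // size_t header written by free_list::_M_get precedes it):
      //
      //   [ use count ][ bitmap 0 ... bitmap N-1 ][ block 0 ... block M-1 ]
      //
      // where M == _S_block_size and N == M / bits_per_block.  __bp.first
      // points at block 0, so the bitmaps and the use count are reached
      // by walking backwards from __bp.first, which is exactly what
      // _Ffit_finder and _Bitmap_counter do.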

      static _BPVector _S_mem_blocks;
      static size_t _S_block_size;
      static __detail::_Bitmap_counter<_Alloc_block*> _S_last_request;
      static typename _BPVector::size_type _S_last_dealloc_index;

#if defined __GTHREADS
      static __mutex_type _S_mut;
#endif

    public:

      /** @brief  Allocates memory for a single object of size
       *  sizeof(_Tp).
       *
       *  @throw  std::bad_alloc. If memory can not be allocated.
       *
       *  Complexity: Worst case complexity is O(N), but that
       *  is hardly ever hit. If and when this particular case is
       *  encountered, the next few cases are guaranteed to have a
       *  worst case complexity of O(1)!  That's why this function
       *  performs very well on average. You can consider its
       *  complexity to be what is commonly called amortized
       *  constant time.
       */
      pointer
      _M_allocate_single_object() _GLIBCXX_THROW(std::bad_alloc)
      {
#if defined __GTHREADS
        __scoped_lock __bit_lock(_S_mut);
#endif
        // The algorithm is something like this: The _S_last_request
        // variable points to the last accessed Bit Map. Starting there,
        // we try to find a free block in the current bitmap, or in
        // succeeding bitmaps until the last bitmap is reached. If no
        // free block turns up, we resort to the First Fit method.

        // WARNING: Do not re-order the condition in the while
        // statement below, because it relies on C++'s short-circuit
        // evaluation. The return from _S_last_request._M_get() will
        // NOT be dereferenceable if _S_last_request._M_finished()
        // returns true. This would inevitably lead to a NULL pointer
        // dereference if tinkered with.
        while (_S_last_request._M_finished() == false
               && (*(_S_last_request._M_get()) == 0))
          _S_last_request.operator++();

        if (__builtin_expect(_S_last_request._M_finished() == true, false))
          {
            // Fall Back to First Fit algorithm.
            typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
            _FFF __fff;
            _BPiter __bpi = _S_find(__detail::_Functor_Ref<_FFF>(__fff));

            if (__bpi != _S_mem_blocks.end())
              {
                // Search was successful. Ok, now mark the first bit from
                // the right as 0, meaning Allocated. This bit is obtained
                // by calling _M_get() on __fff.
                size_t __nz_bit = _Bit_scan_forward(*__fff._M_get());
                __detail::__bit_allocate(__fff._M_get(), __nz_bit);

                _S_last_request._M_reset(__bpi - _S_mem_blocks.begin());

                // Now, get the address of the bit we marked as allocated.
                pointer __ret = reinterpret_cast<pointer>
                  (__bpi->first + __fff._M_offset() + __nz_bit);
                size_t* __puse_count =
                  reinterpret_cast<size_t*>
                  (__bpi->first) - (__detail::__num_bitmaps(*__bpi) + 1);

                ++(*__puse_count);
                return __ret;
              }
            else
              {
                // Search was unsuccessful. We Add more memory to the
                // pool by calling _S_refill_pool().
                _S_refill_pool();

                // _M_Reset the _S_last_request structure to the first
                // free block's bit map.
                _S_last_request._M_reset(_S_mem_blocks.size() - 1);

                // Now, mark that bit as allocated.
              }
          }

        // _S_last_request holds a pointer to a valid bit map, that
        // points to a free block in memory.
        size_t __nz_bit = _Bit_scan_forward(*_S_last_request._M_get());
        __detail::__bit_allocate(_S_last_request._M_get(), __nz_bit);

        pointer __ret = reinterpret_cast<pointer>
          (_S_last_request._M_base() + _S_last_request._M_offset() + __nz_bit);

        size_t* __puse_count = reinterpret_cast<size_t*>
          (_S_mem_blocks[_S_last_request._M_where()].first)
          - (__detail::
             __num_bitmaps(_S_mem_blocks[_S_last_request._M_where()]) + 1);

        ++(*__puse_count);
        return __ret;
      }

      /** @brief  Deallocates memory that belongs to a single object of
       *  size sizeof(_Tp).
       *
       *  Complexity: O(lg(N)), but the worst case is not hit
       *  often! This is because containers usually deallocate memory
       *  close to each other and this case is handled in O(1) time by
       *  the deallocate function.
       */
      void
      _M_deallocate_single_object(pointer __p) throw()
      {
#if defined __GTHREADS
        __scoped_lock __bit_lock(_S_mut);
#endif
        _Alloc_block* __real_p = reinterpret_cast<_Alloc_block*>(__p);

        typedef typename _BPVector::iterator _Iterator;
        typedef typename _BPVector::difference_type _Difference_type;

        _Difference_type __diff;
        long __displacement;

        _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);

        __detail::_Inclusive_between<_Alloc_block*> __ibt(__real_p);
        if (__ibt(_S_mem_blocks[_S_last_dealloc_index]))
          {
            _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index
                                  <= _S_mem_blocks.size() - 1);

            // Initial Assumption was correct!
            __diff = _S_last_dealloc_index;
            __displacement = __real_p - _S_mem_blocks[__diff].first;
          }
        else
          {
            _Iterator _iter = _S_find(__ibt);

            _GLIBCXX_DEBUG_ASSERT(_iter != _S_mem_blocks.end());

            __diff = _iter - _S_mem_blocks.begin();
            __displacement = __real_p - _S_mem_blocks[__diff].first;
            _S_last_dealloc_index = __diff;
          }

        // Get the position of the iterator that has been found.
        const size_t __rotate = (__displacement
                                 % size_t(__detail::bits_per_block));
        size_t* __bitmapC =
          reinterpret_cast<size_t*>
          (_S_mem_blocks[__diff].first) - 1;
        __bitmapC -= (__displacement / size_t(__detail::bits_per_block));

        __detail::__bit_free(__bitmapC, __rotate);
        size_t* __puse_count = reinterpret_cast<size_t*>
          (_S_mem_blocks[__diff].first)
          - (__detail::__num_bitmaps(_S_mem_blocks[__diff]) + 1);

        _GLIBCXX_DEBUG_ASSERT(*__puse_count != 0);

        --(*__puse_count);

        if (__builtin_expect(*__puse_count == 0, false))
          {
            _S_block_size /= 2;

            // We can safely remove this block.
            // _Block_pair __bp = _S_mem_blocks[__diff];
            this->_M_insert(__puse_count);
            _S_mem_blocks.erase(_S_mem_blocks.begin() + __diff);

            // Reset the _S_last_request variable to reflect the
            // erased block. We do this to protect future requests
            // after the last block has been removed from a particular
            // memory Chunk, which in turn has been returned to the
            // free list, and hence had been erased from the vector,
            // so the size of the vector gets reduced by 1.
            if ((_Difference_type)_S_last_request._M_where() >= __diff--)
              _S_last_request._M_reset(__diff);

            // The index of the memory region that might hold the next
            // address passed to deallocate may have been invalidated by
            // the erase above, so restore that invariant too.
            if (_S_last_dealloc_index >= _S_mem_blocks.size())
              {
                _S_last_dealloc_index = (__diff != -1 ? __diff : 0);
                _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);
              }
          }
      }

    public:
      bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
      { }

      bitmap_allocator(const bitmap_allocator&) _GLIBCXX_USE_NOEXCEPT
      { }

      template<typename _Tp1>
        bitmap_allocator(const bitmap_allocator<_Tp1>&) _GLIBCXX_USE_NOEXCEPT
        { }

      ~bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
      { }

      pointer
      allocate(size_type __n)
      {
        if (__n > this->max_size())
          std::__throw_bad_alloc();

#if __cpp_aligned_new
        if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
          {
            const size_type __b = __n * sizeof(value_type);
            std::align_val_t __al = std::align_val_t(alignof(value_type));
            return static_cast<pointer>(::operator new(__b, __al));
          }
#endif

        if (__builtin_expect(__n == 1, true))
          return this->_M_allocate_single_object();
        else
          {
            const size_type __b = __n * sizeof(value_type);
            return reinterpret_cast<pointer>(::operator new(__b));
          }
      }

      pointer
      allocate(size_type __n, typename bitmap_allocator<void>::const_pointer)
      { return allocate(__n); }

      void
      deallocate(pointer __p, size_type __n) throw()
      {
        if (__builtin_expect(__p != 0, true))
          {
#if __cpp_aligned_new
            // Types with extended alignment are handled by operator delete.
            if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
              {
                ::operator delete(__p, std::align_val_t(alignof(value_type)));
                return;
              }
#endif

            if (__builtin_expect(__n == 1, true))
              this->_M_deallocate_single_object(__p);
            else
              ::operator delete(__p);
          }
      }

      pointer
      address(reference __r) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__r); }

      const_pointer
      address(const_reference __r) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__r); }

      size_type
      max_size() const _GLIBCXX_USE_NOEXCEPT
      { return size_type(-1) / sizeof(value_type); }

#if __cplusplus >= 201103L
      template<typename _Up, typename... _Args>
        void
        construct(_Up* __p, _Args&&... __args)
        { ::new((void *)__p) _Up(std::forward<_Args>(__args)...); }

      template<typename _Up>
        void
        destroy(_Up* __p)
        { __p->~_Up(); }
#else
      void
      construct(pointer __p, const_reference __data)
      { ::new((void *)__p) value_type(__data); }

      void
      destroy(pointer __p)
      { __p->~value_type(); }
#endif
    };

  template<typename _Tp1, typename _Tp2>
    bool
    operator==(const bitmap_allocator<_Tp1>&,
               const bitmap_allocator<_Tp2>&) throw()
    { return true; }

  template<typename _Tp1, typename _Tp2>
    bool
    operator!=(const bitmap_allocator<_Tp1>&,
               const bitmap_allocator<_Tp2>&) throw()
    { return false; }

  // Static member definitions.
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::_BPVector
    bitmap_allocator<_Tp>::_S_mem_blocks;

  template<typename _Tp>
    size_t bitmap_allocator<_Tp>::_S_block_size =
      2 * size_t(__detail::bits_per_block);

  template<typename _Tp>
    typename bitmap_allocator<_Tp>::_BPVector::size_type
    bitmap_allocator<_Tp>::_S_last_dealloc_index = 0;

  template<typename _Tp>
    __detail::_Bitmap_counter
      <typename bitmap_allocator<_Tp>::_Alloc_block*>
      bitmap_allocator<_Tp>::_S_last_request(_S_mem_blocks);

#if defined __GTHREADS
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::__mutex_type
    bitmap_allocator<_Tp>::_S_mut;
#endif

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace __gnu_cxx

#endif
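
// A minimal usage sketch (illustrative, not part of the header): the
// allocator is a drop-in replacement for std::allocator and pays off
// most for node-based containers that allocate one object at a time.
//
//   #include <ext/bitmap_allocator.h>
//   #include <list>
//
//   int main()
//   {
//     std::list<int, __gnu_cxx::bitmap_allocator<int> > __l;
//     for (int __i = 0; __i < 1000; ++__i)
//       __l.push_back(__i);  // each node comes from the bitmap pool
//     __l.clear();           // nodes return to the pool / free list
//   }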