// adaptive_node_pool_impl.hpp
  1. //////////////////////////////////////////////////////////////////////////////
  2. //
  3. // (C) Copyright Ion Gaztanaga 2005-2013. Distributed under the Boost
  4. // Software License, Version 1.0. (See accompanying file
  5. // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
  6. //
  7. // See http://www.boost.org/libs/container for documentation.
  8. //
  9. //////////////////////////////////////////////////////////////////////////////
  10. #ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
  11. #define BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
  12. #ifndef BOOST_CONFIG_HPP
  13. # include <boost/config.hpp>
  14. #endif
  15. #if defined(BOOST_HAS_PRAGMA_ONCE)
  16. # pragma once
  17. #endif
  18. #include <boost/container/detail/config_begin.hpp>
  19. #include <boost/container/detail/workaround.hpp>
  20. // container
  21. #include <boost/container/container_fwd.hpp>
  22. #include <boost/container/throw_exception.hpp>
  23. // container/detail
  24. #include <boost/container/detail/pool_common.hpp>
  25. #include <boost/container/detail/iterator.hpp>
  26. #include <boost/move/detail/iterator_to_raw_pointer.hpp>
  27. #include <boost/container/detail/math_functions.hpp>
  28. #include <boost/container/detail/mpl.hpp>
  29. #include <boost/move/detail/to_raw_pointer.hpp>
  30. #include <boost/container/detail/type_traits.hpp>
  31. // intrusive
  32. #include <boost/intrusive/pointer_traits.hpp>
  33. #include <boost/intrusive/set.hpp>
  34. #include <boost/intrusive/list.hpp>
  35. #include <boost/intrusive/slist.hpp>
  36. // other
  37. #include <boost/assert.hpp>
  38. #include <boost/core/no_exceptions_support.hpp>
  39. #include <cstddef>
  40. namespace boost {
  41. namespace container {
//Bit flags controlling adaptive pool behavior (combined with bitwise OR).
namespace adaptive_pool_flag {
//No special behavior requested
static const unsigned int none = 0u;
//Blocks are only aligned: the block header sits at the start of the
//allocation and no multi-subblock/header-offset layout is used
static const unsigned int align_only = 1u << 0u;
//Keep the free block container ordered by free node count
static const unsigned int size_ordered = 1u << 1u;
//Keep the free block container ordered by block address
static const unsigned int address_ordered = 1u << 2u;
} //namespace adaptive_pool_flag{
  48. namespace dtl {
  49. template<class size_type>
  50. struct hdr_offset_holder_t
  51. {
  52. hdr_offset_holder_t(size_type offset = 0)
  53. : hdr_offset(offset)
  54. {}
  55. size_type hdr_offset;
  56. };
  57. template<class SizeType, unsigned int Flags>
  58. struct less_func;
  59. template<class SizeType>
  60. struct less_func<SizeType, adaptive_pool_flag::none>
  61. {
  62. static bool less(SizeType, SizeType, const void *, const void *)
  63. { return true; }
  64. };
  65. template<class SizeType>
  66. struct less_func<SizeType, adaptive_pool_flag::size_ordered>
  67. {
  68. static bool less(SizeType ls, SizeType rs, const void *, const void *)
  69. { return ls < rs; }
  70. };
  71. template<class SizeType>
  72. struct less_func<SizeType, adaptive_pool_flag::address_ordered>
  73. {
  74. static bool less(SizeType, SizeType, const void *la, const void *ra)
  75. { return la < ra; }
  76. };
  77. template<class SizeType>
  78. struct less_func<SizeType, adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered>
  79. {
  80. static bool less(SizeType ls, SizeType rs, const void *la, const void *ra)
  81. { return (ls < rs) || ((ls == rs) && (la < ra)); }
  82. };
//Container traits used when an ordering flag is active: blocks are stored
//in an intrusive multiset ordered by block_info_t's operator<.
template<class VoidPointer, class SizeType, unsigned OrderFlags>
struct block_container_traits
{
   typedef typename bi::make_set_base_hook
      < bi::void_pointer<VoidPointer>
      , bi::optimize_size<true>
      , bi::link_mode<bi::normal_link> >::type hook_t;

   template<class T>
   struct container
   {
      typedef typename bi::make_multiset
         <T, bi::base_hook<hook_t>, bi::size_type<SizeType> >::type type;
   };

   //Restores multiset ordering after v's free node count has changed while
   //v was already linked. Moves v as little as possible: if v only needs to
   //advance one position, the nodes are swapped in place instead of doing a
   //full erase + insert.
   template<class Container>
   static void reinsert_was_used(Container &container, typename Container::reference v, bool)
   {
      typedef typename Container::const_iterator const_block_iterator;
      typedef typename Container::iterator       block_iterator;
      typedef typename Container::value_compare  value_compare;

      const block_iterator this_block(Container::s_iterator_to(v));
      const const_block_iterator cendit(container.cend());
      block_iterator next_block(this_block);

      //Only act if ordering is broken: v now compares greater than its successor
      if(++next_block != cendit && value_compare()(*next_block, v)){
         const_block_iterator next2_block(next_block);
         //Test if v should be swapped with next (optimization)
         if(++next2_block == cendit || !value_compare()(*next2_block, v)){
            //One-position move: swap the intrusive nodes directly
            v.swap_nodes(*next_block);
            BOOST_ASSERT(++next_block == this_block);
         }
         else{
            //General case: remove and reinsert at the correct position
            container.erase(this_block);
            container.insert(v);
         }
      }
   }

   //Links a block that was previously unlinked (the bool hint is unused here,
   //the multiset decides the position from the comparison)
   template<class Container>
   static void insert_was_empty(Container &container, typename Container::value_type &v, bool)
   {
      container.insert(v);
   }

   //Removes the first (smallest) block from the container
   template<class Container>
   static void erase_first(Container &container)
   {
      container.erase(container.cbegin());
   }

   //Removes the last (largest) block from the container
   template<class Container>
   static void erase_last(Container &container)
   {
      container.erase(--container.cend());
   }
};
  134. template<class VoidPointer, class SizeType>
  135. struct block_container_traits<VoidPointer, SizeType, 0u>
  136. {
  137. typedef typename bi::make_list_base_hook
  138. < bi::void_pointer<VoidPointer>
  139. , bi::link_mode<bi::normal_link> >::type hook_t;
  140. template<class T>
  141. struct container
  142. {
  143. typedef typename bi::make_list
  144. <T, bi::base_hook<hook_t>, bi::size_type<SizeType>, bi::constant_time_size<false> >::type type;
  145. };
  146. template<class Container>
  147. static void reinsert_was_used(Container &container, typename Container::value_type &v, bool is_full)
  148. {
  149. if(is_full){
  150. container.erase(Container::s_iterator_to(v));
  151. container.push_back(v);
  152. }
  153. }
  154. template<class Container>
  155. static void insert_was_empty(Container &container, typename Container::value_type &v, bool is_full)
  156. {
  157. if(is_full){
  158. container.push_back(v);
  159. }
  160. else{
  161. container.push_front(v);
  162. }
  163. }
  164. template<class Container>
  165. static void erase_first(Container &container)
  166. {
  167. container.pop_front();
  168. }
  169. template<class Container>
  170. static void erase_last(Container &container)
  171. {
  172. container.pop_back();
  173. }
  174. };
  175. /////////////////////////////
  176. //
  177. // adaptive_pool_types
  178. //
  179. /////////////////////////////
//Aggregates the type machinery (hooks, block header, block container) that
//the adaptive pool needs, derived from the pointer type, size type and flags.
template<class MultiallocationChain, class VoidPointer, class SizeType, unsigned int Flags>
struct adaptive_pool_types
{
   typedef VoidPointer void_pointer;
   //Ordering bits extracted from Flags: select the block container flavor
   //(multiset when any ordering bit is set, plain list when zero)
   static const unsigned ordered = (Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered));
   typedef block_container_traits<VoidPointer, SizeType, ordered> block_container_traits_t;
   typedef typename block_container_traits_t::hook_t hook_t;
   typedef hdr_offset_holder_t<SizeType> hdr_offset_holder;
   //Same ordering bits, used to pick the less_func comparison policy
   static const unsigned int order_flags = Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered);
   typedef MultiallocationChain free_nodes_t;

   //Per-block header: combines the header offset holder, the intrusive
   //container hook and the list of free nodes owned by this block.
   struct block_info_t
      : public hdr_offset_holder,
        public hook_t
   {
      //An intrusive list of free node from this block
      free_nodes_t free_nodes;

      //Ordering is delegated to the policy selected by order_flags,
      //comparing free node counts and/or addresses
      friend bool operator <(const block_info_t &l, const block_info_t &r)
      {
         return less_func<SizeType, order_flags>::
            less(l.free_nodes.size(), r.free_nodes.size(), &l , &r);
      }

      //Identity comparison: headers are equal only if they are the same object
      friend bool operator ==(const block_info_t &l, const block_info_t &r)
      {  return &l == &r;  }
   };
   typedef typename block_container_traits_t:: template container<block_info_t>::type block_container_t;
};
  206. /////////////////////////////////////////////
  207. //
  208. // candidate_power_of_2_ct
  209. //
  210. /////////////////////////////////////////////
//Compile-time helper: for a candidate power-of-two "alignment", computes how
//many subblocks and nodes the block layout yields and whether the wasted
//space stays below "overhead_percent".
template< std::size_t alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent>
struct candidate_power_of_2_ct_helper
{
   //Nodes that fit when the block is a single subblock holding the header
   static const std::size_t hdr_subblock_elements_alone = (alignment - hdr_size - payload_per_allocation)/real_node_size;
   //Nodes in the header subblock when more subblocks follow
   //(computed with the same formula as the standalone case)
   static const std::size_t hdr_subblock_elements_first = (alignment - hdr_size - payload_per_allocation)/real_node_size;
   //Nodes per middle subblock (only a header offset mark is stored there)
   static const std::size_t elements_per_b_subblock_mid = (alignment - hdr_offset_size)/real_node_size;
   //Nodes in the last subblock (header offset plus the allocation payload)
   static const std::size_t elements_per_b_subblock_end = (alignment - hdr_offset_size - payload_per_allocation)/real_node_size;
   //Extra (non-header) subblocks needed to reach min_elements_per_block:
   //0 if the header subblock alone suffices, 1 if header + end subblock
   //suffice, otherwise add enough middle subblocks (ceiling division)
   static const std::size_t num_b_subblock =
      hdr_subblock_elements_alone >= min_elements_per_block
         ? 0
         : (  ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
            ? 1
            : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
            )
         ;

   static const std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;

   //Total nodes provided by the chosen subblock layout
   static const std::size_t total_nodes = (num_b_subblock == 0)
      ? hdr_subblock_elements_alone
      : ( (num_b_subblock == 1)
         ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
         : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
         )
      ;
   static const std::size_t total_data = total_nodes*real_node_size;
   static const std::size_t total_size = alignment*(num_b_subblock+1);
   //True when the wasted fraction (headers, payload, padding) is below the limit
   static const bool overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
};
//Compile-time loop: starting from "initial_alignment", doubles the candidate
//power of two until the helper reports that the overhead requirement is
//satisfied. The recursion terminates via the Loop == false specialization.
template< std::size_t initial_alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent
        , bool Loop = true>
struct candidate_power_of_2_ct
{
   typedef candidate_power_of_2_ct_helper
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent> helper_t;

   //If the overhead requirement is not satisfied, try the next power of two
   static const std::size_t candidate_power_of_2 = initial_alignment << std::size_t(!helper_t::overhead_satisfied);

   //Recurse: when overhead_satisfied is true the next instantiation selects
   //the Loop == false specialization, ending the recursion
   typedef typename candidate_power_of_2_ct
      < candidate_power_of_2
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent
      , !helper_t::overhead_satisfied
      >::type type;

   //Final results forwarded from the terminating specialization
   static const std::size_t alignment     = type::alignment;
   static const std::size_t num_subblocks = type::num_subblocks;
   static const std::size_t real_num_node = type::real_num_node;
};
//Loop == false specialization: recursion end point. The current
//initial_alignment is the accepted candidate, so expose the final alignment,
//subblock count and node count computed by the helper.
template< std::size_t initial_alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent
        >
struct candidate_power_of_2_ct
   < initial_alignment
   , real_node_size
   , payload_per_allocation
   , min_elements_per_block
   , hdr_size
   , hdr_offset_size
   , overhead_percent
   , false>
{
   //The result type is this specialization itself
   typedef candidate_power_of_2_ct
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent
      , false> type;

   typedef candidate_power_of_2_ct_helper
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent> helper_t;

   static const std::size_t alignment     = initial_alignment;
   static const std::size_t num_subblocks = helper_t::num_b_subblock+1;
   static const std::size_t real_num_node = helper_t::total_nodes;
};
  316. /////////////////////////////////////////////
  317. //
  318. // candidate_power_of_2_rt
  319. //
  320. /////////////////////////////////////////////
  321. inline void candidate_power_of_2_rt ( std::size_t initial_alignment
  322. , std::size_t real_node_size
  323. , std::size_t payload_per_allocation
  324. , std::size_t min_elements_per_block
  325. , std::size_t hdr_size
  326. , std::size_t hdr_offset_size
  327. , std::size_t overhead_percent
  328. , std::size_t &alignment
  329. , std::size_t &num_subblocks
  330. , std::size_t &real_num_node)
  331. {
  332. bool overhead_satisfied = false;
  333. std::size_t num_b_subblock = 0;
  334. std::size_t total_nodes = 0;
  335. while(!overhead_satisfied)
  336. {
  337. std::size_t hdr_subblock_elements_alone = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
  338. std::size_t hdr_subblock_elements_first = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
  339. std::size_t elements_per_b_subblock_mid = (initial_alignment - hdr_offset_size)/real_node_size;
  340. std::size_t elements_per_b_subblock_end = (initial_alignment - hdr_offset_size - payload_per_allocation)/real_node_size;
  341. num_b_subblock =
  342. hdr_subblock_elements_alone >= min_elements_per_block
  343. ? 0
  344. : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
  345. ? 1
  346. : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
  347. )
  348. ;
  349. std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;
  350. total_nodes = (num_b_subblock == 0)
  351. ? hdr_subblock_elements_alone
  352. : ( (num_b_subblock == 1)
  353. ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
  354. : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
  355. )
  356. ;
  357. std::size_t total_data = total_nodes*real_node_size;
  358. std::size_t total_size = initial_alignment*(num_b_subblock+1);
  359. overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
  360. initial_alignment = initial_alignment << std::size_t(!overhead_satisfied);
  361. }
  362. alignment = initial_alignment;
  363. num_subblocks = num_b_subblock+1;
  364. real_num_node = total_nodes;
  365. }
  366. /////////////////////////////////////////////
  367. //
  368. // private_adaptive_node_pool_impl_common
  369. //
  370. /////////////////////////////////////////////
  371. template< class SegmentManagerBase, unsigned int Flags>
  372. class private_adaptive_node_pool_impl_common
  373. {
  374. public:
  375. //!Segment manager typedef
  376. typedef SegmentManagerBase segment_manager_base_type;
  377. typedef typename SegmentManagerBase::multiallocation_chain multiallocation_chain;
  378. typedef typename SegmentManagerBase::size_type size_type;
  379. //Flags
  380. //align_only
  381. static const bool AlignOnly = (Flags & adaptive_pool_flag::align_only) != 0;
  382. typedef bool_<AlignOnly> IsAlignOnly;
  383. typedef true_ AlignOnlyTrue;
  384. typedef false_ AlignOnlyFalse;
  385. typedef typename SegmentManagerBase::void_pointer void_pointer;
  386. static const typename SegmentManagerBase::
  387. size_type PayloadPerAllocation = SegmentManagerBase::PayloadPerAllocation;
  388. typedef typename boost::intrusive::pointer_traits
  389. <void_pointer>::template rebind_pointer<segment_manager_base_type>::type segment_mngr_base_ptr_t;
  390. protected:
  391. typedef adaptive_pool_types
  392. <multiallocation_chain, void_pointer, size_type, Flags> adaptive_pool_types_t;
  393. typedef typename adaptive_pool_types_t::free_nodes_t free_nodes_t;
  394. typedef typename adaptive_pool_types_t::block_info_t block_info_t;
  395. typedef typename adaptive_pool_types_t::block_container_t block_container_t;
  396. typedef typename adaptive_pool_types_t::block_container_traits_t block_container_traits_t;
  397. typedef typename block_container_t::iterator block_iterator;
  398. typedef typename block_container_t::const_iterator const_block_iterator;
  399. typedef typename adaptive_pool_types_t::hdr_offset_holder hdr_offset_holder;
  400. typedef private_adaptive_node_pool_impl_common this_type;
  401. static const size_type MaxAlign = alignment_of<void_pointer>::value;
  402. static const size_type HdrSize = ((sizeof(block_info_t)-1)/MaxAlign+1)*MaxAlign;
  403. static const size_type HdrOffsetSize = ((sizeof(hdr_offset_holder)-1)/MaxAlign+1)*MaxAlign;
  404. segment_mngr_base_ptr_t mp_segment_mngr_base; //Segment manager
  405. block_container_t m_block_container; //Intrusive block list
  406. size_type m_totally_free_blocks; //Free blocks
   class block_destroyer;
   friend class block_destroyer;

   //Disposer functor: collects the allocation address of each totally free
   //block into a multiallocation chain so the segment manager can release
   //them all in a single call.
   class block_destroyer
   {
      public:
      block_destroyer(const this_type *impl, multiallocation_chain &chain, const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
         :  mp_impl(impl), m_chain(chain), m_num_subblocks(num_subblocks), m_real_block_alignment(real_block_alignment), m_real_num_node(real_num_node)
      {}

      //Dispatches on the align-only flag at compile time
      void operator()(typename block_container_t::pointer to_deallocate)
      {  return this->do_destroy(to_deallocate, IsAlignOnly());  }

      private:
      //Align-only mode: the block header is the start of the allocation,
      //so its address can be chained directly
      void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyTrue)
      {
         //Only totally free blocks may be destroyed
         BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
         m_chain.push_back(to_deallocate);
      }

      //Multi-subblock mode: the allocation starts at the first subblock,
      //recovered from the block header before chaining
      void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyFalse)
      {
         BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
         //The header subblock stores offset zero
         BOOST_ASSERT(0 == to_deallocate->hdr_offset);
         hdr_offset_holder *hdr_off_holder =
            mp_impl->priv_first_subblock_from_block(boost::movelib::to_raw_pointer(to_deallocate), m_num_subblocks, m_real_block_alignment);
         m_chain.push_back(hdr_off_holder);
      }

      const this_type *mp_impl;               //Pool implementation (for subblock lookup)
      multiallocation_chain &m_chain;         //Destination chain for block addresses
      const size_type m_num_subblocks;
      const size_type m_real_block_alignment;
      const size_type m_real_num_node;
   };
   //This macro will activate invariant checking. Slow, but helpful for debugging the code.
   //#define BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS

   //Debug-only consistency check of the block container and its counters.
   //Verifies container ordering, per-block free node counts, the totally-free
   //block counter and (in multi-subblock mode) every header offset mark.
   //Compiles to a no-op unless the macro above is defined.
   void priv_invariants(const size_type real_num_node, const size_type num_subblocks, const size_type real_block_alignment) const
   {
      (void)real_num_node; (void)num_subblocks; (void)real_block_alignment;
      #ifdef BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
      //Check that the total totally free blocks are correct
      BOOST_ASSERT(m_block_container.size() >= m_totally_free_blocks);
      const const_block_iterator itend(m_block_container.cend());
      const const_block_iterator itbeg(m_block_container.cbegin());

      {  //Try to do checks in a single iteration
         const_block_iterator it(itbeg);
         size_type total_free_nodes = 0;
         size_type total_free_blocks = 0u;
         for(; it != itend; ++it){
            if(it != itbeg){
               //Check order invariant: no element may compare less than its predecessor
               const_block_iterator prev(it);
               --prev;
               BOOST_ASSERT(!(m_block_container.key_comp()(*it, *prev)));
               (void)prev; (void)it;
            }

            //free_nodes invariant: linked blocks hold between 1 and
            //real_num_node free nodes
            const size_type free_nodes = it->free_nodes.size();
            BOOST_ASSERT(free_nodes <= real_num_node);
            BOOST_ASSERT(free_nodes != 0);

            //Acummulate total_free_nodes and total_free_blocks
            total_free_nodes += free_nodes;
            total_free_blocks += it->free_nodes.size() == real_num_node;

            if (!AlignOnly) {
               //Check that header offsets are correct: walk every subblock and
               //verify its stored offset and alignment
               hdr_offset_holder *hdr_off_holder = this->priv_first_subblock_from_block(const_cast<block_info_t *>(&*it), num_subblocks, real_block_alignment);
               for (size_type i = 0, max = num_subblocks; i < max; ++i) {
                  const size_type offset = reinterpret_cast<char*>(const_cast<block_info_t *>(&*it)) - reinterpret_cast<char*>(hdr_off_holder);
                  (void)offset;
                  BOOST_ASSERT(hdr_off_holder->hdr_offset == offset);
                  BOOST_ASSERT(0 == (reinterpret_cast<std::size_t>(hdr_off_holder) & (real_block_alignment - 1)));
                  BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
                  hdr_off_holder = reinterpret_cast<hdr_offset_holder *>(reinterpret_cast<char*>(hdr_off_holder) + real_block_alignment);
               }
            }
         }
         //Counters must agree with what the iteration found
         BOOST_ASSERT(total_free_blocks == m_totally_free_blocks);
         BOOST_ASSERT(total_free_nodes >= m_totally_free_blocks*real_num_node);
      }
      #endif
   }
  484. void priv_deallocate_free_blocks( const size_type max_free_blocks, const size_type real_num_node
  485. , const size_type num_subblocks, const size_type real_block_alignment)
  486. { //Trampoline function to ease inlining
  487. if(m_totally_free_blocks > max_free_blocks){
  488. this->priv_deallocate_free_blocks_impl(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
  489. }
  490. }
   //Returns the first subblock (the start of the original aligned allocation)
   //of the given block, dispatching on the align-only flag.
   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment) const
   {  return this->priv_first_subblock_from_block(block, num_subblocks, real_block_alignment, IsAlignOnly());   }

   //Multi-subblock layout: the block header lives in the last subblock, so
   //the first subblock is (num_subblocks-1) alignment units before it.
   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyFalse) const
   {
      hdr_offset_holder *const hdr_off_holder = reinterpret_cast<hdr_offset_holder*>
            (reinterpret_cast<char*>(block) - (num_subblocks-1)*real_block_alignment);
      //The first subblock must point back to the header and be properly aligned
      BOOST_ASSERT(hdr_off_holder->hdr_offset == size_type(reinterpret_cast<char*>(block) - reinterpret_cast<char*>(hdr_off_holder)));
      BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
      BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
      return hdr_off_holder;
   }

   //Align-only layout: the block header is the start of the allocation itself.
   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyTrue) const
   {
      (void)num_subblocks; (void)real_block_alignment;
      return reinterpret_cast<hdr_offset_holder*>(block);
   }
   //Returns totally free blocks to the segment manager until only
   //max_free_blocks remain cached. Called (via the trampoline) only when
   //m_totally_free_blocks > max_free_blocks.
   void priv_deallocate_free_blocks_impl( const size_type max_free_blocks, const size_type real_num_node
                                        , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //Now check if we've reached the free nodes limit
      //and check if we have free blocks. If so, deallocate as much
      //as we can to stay below the limit
      multiallocation_chain chain;
      {
         if(Flags & adaptive_pool_flag::size_ordered){
            //Size ordered container: totally free blocks (maximum free node
            //count) sort last, so pop them from the back
            const_block_iterator it = m_block_container.cend();
            --it;
            size_type totally_free_blocks = m_totally_free_blocks;

            for( ; totally_free_blocks > max_free_blocks; --totally_free_blocks){
               BOOST_ASSERT(it->free_nodes.size() == real_num_node);
               //Grab the allocation address before unlinking, then step the
               //iterator back before erase_last invalidates it
               void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
               --it;
               block_container_traits_t::erase_last(m_block_container);
               chain.push_front(void_pointer(addr));
            }
         }
         else{
            //No size ordering: scan backwards and erase only blocks whose
            //free node count shows they are totally free
            const_block_iterator it = m_block_container.cend();
            size_type totally_free_blocks = m_totally_free_blocks;

            while(totally_free_blocks > max_free_blocks){
               --it;
               if(it->free_nodes.size() == real_num_node){
                  void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
                  it = m_block_container.erase(it);
                  chain.push_front(void_pointer(addr));
                  --totally_free_blocks;
               }
            }
         }
         //Exactly the surplus blocks must have been collected
         BOOST_ASSERT((m_totally_free_blocks - max_free_blocks) == chain.size());
         m_totally_free_blocks = max_free_blocks;
      }
      //Return all collected blocks to the segment manager in a single call
      this->mp_segment_mngr_base->deallocate_many(chain);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
   //Distributes the max_node_in_mem nodes laid out at mem_address: enough
   //nodes to make "chain" reach target_elem_in_chain elements are appended to
   //the chain; any remaining nodes are stored in the block's free node list.
   void priv_fill_chain_remaining_to_block
      ( multiallocation_chain &chain, size_type target_elem_in_chain, block_info_t &c_info
      , char *mem_address, size_type max_node_in_mem
      , const size_type real_node_size)
   {
      BOOST_ASSERT(chain.size() <= target_elem_in_chain);

      //First add all possible nodes to the chain
      const size_type left = target_elem_in_chain - chain.size();
      const size_type add_to_chain = (max_node_in_mem < left) ? max_node_in_mem : left;
      //incorporate_after returns the first address past the consumed nodes
      char *free_mem_address = static_cast<char *>(boost::movelib::to_raw_pointer
         (chain.incorporate_after(chain.last(), void_pointer(mem_address), real_node_size, add_to_chain)));
      //Now store remaining nodes in the free list
      if(const size_type free = max_node_in_mem - add_to_chain){
         free_nodes_t & free_nodes = c_info.free_nodes;
         free_nodes.incorporate_after(free_nodes.last(), void_pointer(free_mem_address), real_node_size, free);
      }
   }
   //!Allocates a several blocks of nodes. Can throw
   //Align-only version: each block is a single aligned allocation with the
   //block_info_t header at its start, followed by the nodes.
   void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                   , const size_type max_free_blocks
                                   , const size_type real_block_alignment, const size_type real_node_size
                                   , const size_type real_num_node, const size_type num_subblocks
                                   , AlignOnlyTrue)
   {
      (void)num_subblocks;
      BOOST_ASSERT(m_block_container.empty());
      BOOST_ASSERT(min_elements > 0);
      //Blocks needed to provide min_elements nodes (ceiling division)
      const size_type n = (min_elements - 1)/real_num_node + 1;
      const size_type real_block_size = real_block_alignment - PayloadPerAllocation;
      const size_type target_elem_in_chain = chain.size() + min_elements;

      for(size_type i = 0; i != n; ++i){
         //We allocate a new NodeBlock and put it the last
         //element of the tree
         char *mem_address = static_cast<char*>
            (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
         if(!mem_address){
            //In case of error, free memory deallocating all nodes (the new ones allocated
            //in this function plus previously stored nodes in chain).
            this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
            throw_bad_alloc();
         }
         //Construct the block header at the start of the allocation; the
         //nodes begin after the rounded-up header size
         block_info_t &c_info = *new(mem_address)block_info_t();
         mem_address += HdrSize;

         //Fill the chain first, leftover nodes go to the block's free list
         this->priv_fill_chain_remaining_to_block(chain, target_elem_in_chain, c_info, mem_address, real_num_node, real_node_size);
         const size_type free_nodes = c_info.free_nodes.size();
         if(free_nodes){
            const bool is_full = free_nodes == real_num_node;
            //NOTE(review): every freshly allocated block contributes at least
            //one node to the chain above, so is_full looks unreachable here;
            //the assert below encodes that — confirm against
            //priv_fill_chain_remaining_to_block if this path changes
            BOOST_ASSERT(free_nodes < real_num_node);
            m_totally_free_blocks += static_cast<size_type>(is_full);
            block_container_traits_t::insert_was_empty(m_block_container, c_info, is_full);
         }
      }
   }
//!Allocates enough new segmented blocks to supply at least "min_elements"
//!nodes and appends the carved nodes to "chain". Each block spans
//!"num_subblocks" subblocks of "real_block_alignment" bytes: the block header
//!(block_info_t) is constructed in the last subblock and every other subblock
//!starts with a hdr_offset_holder storing the byte distance back to that
//!header. On allocation failure all nodes already in "chain" are deallocated
//!and bad_alloc is thrown.
void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                , const size_type max_free_blocks
                                , const size_type real_block_alignment, const size_type real_node_size
                                , const size_type real_num_node, const size_type num_subblocks
                                , AlignOnlyFalse)
{
   BOOST_ASSERT(m_block_container.empty());
   BOOST_ASSERT(min_elements > 0);
   //Number of whole blocks needed to supply min_elements nodes
   const size_type n = (min_elements - 1)/real_num_node + 1;
   //Total size requested from the segment manager (payload discounted)
   const size_type real_block_size = real_block_alignment*num_subblocks - PayloadPerAllocation;
   //Node capacity of a middle subblock: only the offset marker precedes nodes
   const size_type elements_per_subblock_mid = (real_block_alignment - HdrOffsetSize)/real_node_size;
   //Reduced capacity used for the last non-header subblock
   //NOTE(review): presumably reserves room for the allocator payload — confirm
   const size_type elements_per_subblock_end = (real_block_alignment - HdrOffsetSize - PayloadPerAllocation) / real_node_size;
   //Node capacity of the header subblock, which holds the full block header
   const size_type hdr_subblock_elements = (real_block_alignment - HdrSize - PayloadPerAllocation)/real_node_size;
   //Stop carving nodes once the chain reaches this size
   const size_type target_elem_in_chain = chain.size() + min_elements;
   for(size_type i = 0; i != n; ++i){
      //We allocate a new NodeBlock and put it the last
      //element of the tree
      char *mem_address = static_cast<char*>
         (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
      if(!mem_address){
         //In case of error, free memory deallocating all nodes (the new ones allocated
         //in this function plus previously stored nodes in chain).
         this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
         throw_bad_alloc();
      }
      //First initialize header information on the last subblock
      char *hdr_addr = mem_address + real_block_alignment*(num_subblocks-1);
      block_info_t &c_info = *new(hdr_addr)block_info_t();
      //Some structural checks: hdr_offset must be the first field of the header
      BOOST_ASSERT(static_cast<void*>(&static_cast<hdr_offset_holder&>(c_info).hdr_offset) ==
                   static_cast<void*>(&c_info));   (void)c_info;
      for( size_type subblock = 0, maxsubblock = num_subblocks - 1
         ; subblock < maxsubblock
         ; ++subblock, mem_address += real_block_alignment){
         //Initialize header offset mark so nodes can find the block header
         new(mem_address) hdr_offset_holder(size_type(hdr_addr - mem_address));
         //The last non-header subblock uses the reduced capacity
         const size_type elements_per_subblock = (subblock != (maxsubblock - 1)) ? elements_per_subblock_mid : elements_per_subblock_end;
         this->priv_fill_chain_remaining_to_block
            (chain, target_elem_in_chain, c_info, mem_address + HdrOffsetSize, elements_per_subblock, real_node_size);
      }
      //Carve the remaining nodes from the header subblock itself
      this->priv_fill_chain_remaining_to_block
         (chain, target_elem_in_chain, c_info, hdr_addr + HdrSize, hdr_subblock_elements, real_node_size);
      //A block whose nodes all stayed free counts as totally free
      m_totally_free_blocks += static_cast<size_type>(c_info.free_nodes.size() == real_num_node);
      if (c_info.free_nodes.size())
         m_block_container.push_front(c_info);
   }
}
  647. //!Allocates array of count elements. Can throw
  648. void *priv_allocate_node( const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
  649. , const size_type real_num_node, const size_type num_subblocks)
  650. {
  651. this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
  652. //If there are no free nodes we allocate a new block
  653. if(!m_block_container.empty()){
  654. //We take the first free node the multiset can't be empty
  655. free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
  656. BOOST_ASSERT(!free_nodes.empty());
  657. const size_type free_nodes_count = free_nodes.size();
  658. void *first_node = boost::movelib::to_raw_pointer(free_nodes.pop_front());
  659. if(free_nodes.empty()){
  660. block_container_traits_t::erase_first(m_block_container);
  661. }
  662. m_totally_free_blocks -= static_cast<size_type>(free_nodes_count == real_num_node);
  663. this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
  664. return first_node;
  665. }
  666. else{
  667. multiallocation_chain chain;
  668. this->priv_append_from_new_blocks
  669. (1, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
  670. void *node = boost::movelib::to_raw_pointer(chain.pop_front());
  671. this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
  672. return node;
  673. }
  674. }
//!Allocates "n" nodes and appends them to "chain". Can throw.
//!Nodes are first extracted from partially used blocks stored in the
//!container; when none remain, the rest is carved from newly allocated
//!blocks. On exception every node in "chain" is deallocated before
//!rethrowing.
void priv_allocate_nodes( const size_type n, multiallocation_chain &chain
                        , const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                        , const size_type real_num_node, const size_type num_subblocks)
{
   size_type i = 0;
   BOOST_TRY{
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      while(i != n){
         //If there are no free nodes we allocate all needed blocks
         if (m_block_container.empty()){
            this->priv_append_from_new_blocks
               (n - i, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
            //At most one block can remain stored after the bulk append
            BOOST_ASSERT(m_block_container.empty() || (++m_block_container.cbegin() == m_block_container.cend()));
            BOOST_ASSERT(chain.size() == n);
            break;
         }
         free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
         const size_type free_nodes_count_before = free_nodes.size();
         //A totally free block stops being totally free after this extraction
         m_totally_free_blocks -= static_cast<size_type>(free_nodes_count_before == real_num_node);
         const size_type num_left = n-i;
         //Take as many nodes as this block offers, but no more than needed
         const size_type num_elems = (num_left < free_nodes_count_before) ? num_left : free_nodes_count_before;
         typedef typename free_nodes_t::iterator free_nodes_iterator;
         if(num_left < free_nodes_count_before){
            //Only part of the free list is needed: walk to the last node to
            //extract, unlink the range and splice it at the end of "chain"
            const free_nodes_iterator it_bbeg(free_nodes.before_begin());
            free_nodes_iterator it_bend(it_bbeg);
            for(size_type j = 0; j != num_elems; ++j){
               ++it_bend;
            }
            free_nodes_iterator it_end = it_bend; ++it_end;
            free_nodes_iterator it_beg = it_bbeg; ++it_beg;
            free_nodes.erase_after(it_bbeg, it_end, num_elems);
            chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
            //chain.splice_after(chain.last(), free_nodes, it_bbeg, it_bend, num_elems);
            BOOST_ASSERT(!free_nodes.empty());
         }
         else{
            //The whole free list is consumed: move all nodes to "chain" and
            //remove the now node-less block from the container
            const free_nodes_iterator it_beg(free_nodes.begin()), it_bend(free_nodes.last());
            free_nodes.clear();
            chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
            block_container_traits_t::erase_first(m_block_container);
         }
         i += num_elems;
      }
   }
   BOOST_CATCH(...){
      //Undo the partial allocation before propagating the exception
      this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      BOOST_RETHROW
   }
   BOOST_CATCH_END
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
}
  727. //!Deallocates an array pointed by ptr. Never throws
  728. void priv_deallocate_node( void *pElem
  729. , const size_type max_free_blocks, const size_type real_num_node
  730. , const size_type num_subblocks, const size_type real_block_alignment)
  731. {
  732. this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
  733. block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
  734. const size_type prev_free_nodes = block_info.free_nodes.size();
  735. BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);
  736. //We put the node at the beginning of the free node list
  737. block_info.free_nodes.push_back(void_pointer(pElem));
  738. //The loop reinserts all blocks except the last one
  739. this->priv_reinsert_block(block_info, prev_free_nodes == 0, real_num_node);
  740. this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
  741. this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
  742. }
//!Deallocates the linked list of nodes in "nodes". Never throws.
//!Consecutive nodes belonging to the same block are spliced back into that
//!block's free list in a single operation to exploit locality.
void priv_deallocate_nodes( multiallocation_chain &nodes
                          , const size_type max_free_blocks, const size_type real_num_node
                          , const size_type num_subblocks, const size_type real_block_alignment)
{
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   //To take advantage of node locality, wait until two
   //nodes belong to different blocks. Only then reinsert
   //the block of the first node in the block tree.
   //Cache of the previous block
   block_info_t *prev_block_info = 0;
   //If block was empty before this call, it's not already
   //inserted in the block tree.
   bool prev_block_was_empty = false;
   typedef typename free_nodes_t::iterator free_nodes_iterator;
   {
      const free_nodes_iterator itbb(nodes.before_begin()), ite(nodes.end());
      free_nodes_iterator itf(nodes.begin()), itbf(itbb);
      //Starts at size_type(-1) to compensate for the increment consumed by
      //the initial "dummy" block change below
      size_type splice_node_count = size_type(-1);
      while(itf != ite){
         void *pElem = boost::movelib::to_raw_pointer(boost::movelib::iterator_to_raw_pointer(itf));
         block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
         BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);
         ++splice_node_count;
         //If block change is detected calculate the cached block position in the tree
         if(&block_info != prev_block_info){
            if(prev_block_info){ //Make sure we skip the initial "dummy" cache
               //Unlink the run belonging to the previous block from "nodes",
               //splice it into that block's free list and reposition the block
               free_nodes_iterator it(itbb); ++it;
               nodes.erase_after(itbb, itf, splice_node_count);
               prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*it, &*itbf, splice_node_count);
               this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
               splice_node_count = 0;
            }
            //Update cache with new data
            prev_block_was_empty = block_info.free_nodes.empty();
            prev_block_info = &block_info;
         }
         itbf = itf;
         ++itf;
      }
   }
   if(prev_block_info){
      //The loop reinserts all blocks except the last one
      const free_nodes_iterator itfirst(nodes.begin()), itlast(nodes.last());
      const size_type splice_node_count = nodes.size();
      nodes.clear();
      prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*itfirst, &*itlast, splice_node_count);
      this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
      //Trim totally free blocks above the allowed maximum
      this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
   }
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
}
  794. void priv_reinsert_block(block_info_t &prev_block_info, const bool prev_block_was_empty, const size_type real_num_node)
  795. {
  796. //Cache the free nodes from the block
  797. const size_type this_block_free_nodes = prev_block_info.free_nodes.size();
  798. const bool is_full = this_block_free_nodes == real_num_node;
  799. //Update free block count
  800. m_totally_free_blocks += static_cast<size_type>(is_full);
  801. if(prev_block_was_empty){
  802. block_container_traits_t::insert_was_empty(m_block_container, prev_block_info, is_full);
  803. }
  804. else{
  805. block_container_traits_t::reinsert_was_used(m_block_container, prev_block_info, is_full);
  806. }
  807. }
  808. block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyFalse) const
  809. {
  810. hdr_offset_holder *hdr_off_holder =
  811. reinterpret_cast<hdr_offset_holder*>((std::size_t)node & size_type(~(real_block_alignment - 1)));
  812. BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
  813. BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
  814. block_info_t *block = reinterpret_cast<block_info_t *>
  815. (reinterpret_cast<char*>(hdr_off_holder) + hdr_off_holder->hdr_offset);
  816. BOOST_ASSERT(block->hdr_offset == 0);
  817. return block;
  818. }
  819. block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyTrue) const
  820. {
  821. return (block_info_t *)((std::size_t)node & std::size_t(~(real_block_alignment - 1)));
  822. }
//!Dispatches to the AlignOnlyTrue/AlignOnlyFalse overload according to the
//!pool's compile-time configuration.
block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment) const
{  return this->priv_block_from_node(node, real_block_alignment, IsAlignOnly());  }
//!Deallocates all used memory. Never throws.
//!At this point every stored block must be totally free (anything else is a
//!memory leak); all blocks are disposed and their memory returned to the
//!segment manager in a single bulk deallocation.
void priv_clear(const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
{
   #ifndef NDEBUG
   block_iterator it = m_block_container.begin();
   block_iterator itend = m_block_container.end();
   size_type n_free_nodes = 0;
   for(; it != itend; ++it){
      //Check for memory leak
      BOOST_ASSERT(it->free_nodes.size() == real_num_node);
      ++n_free_nodes;
   }
   //Every stored block is totally free here, so both counters must agree
   BOOST_ASSERT(n_free_nodes == m_totally_free_blocks);
   #endif
   //Check for memory leaks
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   multiallocation_chain chain;
   //block_destroyer disposes each block; presumably it collects the raw
   //block memory into "chain" for the bulk deallocation below
   m_block_container.clear_and_dispose(block_destroyer(this, chain, num_subblocks, real_block_alignment, real_num_node));
   //Return all collected memory to the segment manager in one call
   this->mp_segment_mngr_base->deallocate_many(chain);
   m_totally_free_blocks = 0;
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
}
  847. public:
//!Constructor from the general-purpose segment manager. Never throws.
//!The pool starts with no blocks and no cached free nodes.
private_adaptive_node_pool_impl_common(segment_manager_base_type *segment_mngr_base)
   //General purpose allocator
   : mp_segment_mngr_base(segment_mngr_base)
   , m_block_container()
   , m_totally_free_blocks(0)
{}
  854. size_type num_free_nodes()
  855. {
  856. typedef typename block_container_t::const_iterator citerator;
  857. size_type count = 0;
  858. citerator it (m_block_container.begin()), itend(m_block_container.end());
  859. for(; it != itend; ++it){
  860. count += it->free_nodes.size();
  861. }
  862. return count;
  863. }
  864. void swap(private_adaptive_node_pool_impl_common &other)
  865. {
  866. std::swap(mp_segment_mngr_base, other.mp_segment_mngr_base);
  867. std::swap(m_totally_free_blocks, other.m_totally_free_blocks);
  868. m_block_container.swap(other.m_block_container);
  869. }
//!Returns the segment manager as a raw pointer (the stored pointer may be a
//!fancy/offset pointer type). Never throws
segment_manager_base_type* get_segment_manager_base()const
{  return boost::movelib::to_raw_pointer(mp_segment_mngr_base);  }
  873. };
//!Compile-time block geometry for the "align only" configuration
//!(AlignOnly == true): the block is a single chunk whose alignment is the
//!first power of two able to hold the header plus NodesPerBlock nodes.
template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent
        , bool AlignOnly>
struct calculate_alignment_ct
{
   //Smallest power of two holding the header and all requested nodes
   static const std::size_t alignment     = upper_power_of_2_ct<SizeType, HdrSize + RealNodeSize*NodesPerBlock>::value;
   //Align-only blocks are not segmented into subblocks
   static const std::size_t num_subblocks = 0;
   //Nodes that actually fit once header and allocator payload are discounted
   static const std::size_t real_num_node = (alignment - PayloadPerAllocation - HdrSize)/RealNodeSize;
};
//!Specialization for segmented blocks (AlignOnly == false): the geometry
//!(alignment, number of subblocks and real nodes per block) is selected by
//!candidate_power_of_2_ct, presumably bounding waste by OverheadPercent —
//!see that metafunction's definition.
template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent>
struct calculate_alignment_ct
   < SizeType
   , HdrSize
   , PayloadPerAllocation
   , RealNodeSize
   , NodesPerBlock
   , HdrOffsetSize
   , OverheadPercent
   , false>
{
   //Search starts from the smallest power of two able to hold one node
   //plus the header and allocator payload
   typedef typename candidate_power_of_2_ct
      < upper_power_of_2_ct<SizeType, HdrSize + PayloadPerAllocation + RealNodeSize>::value
      , RealNodeSize
      , PayloadPerAllocation
      , NodesPerBlock
      , HdrSize
      , HdrOffsetSize
      , OverheadPercent
      >::type type;
   static const std::size_t alignment     = type::alignment;
   static const std::size_t num_subblocks = type::num_subblocks;
   static const std::size_t real_num_node = type::real_num_node;
};
  918. /////////////////////////////////////////////
  919. //
  920. // private_adaptive_node_pool_impl_ct
  921. //
  922. /////////////////////////////////////////////
  923. template< class SegmentManagerBase
  924. , std::size_t MaxFreeBlocks
  925. , std::size_t NodeSize
  926. , std::size_t NodesPerBlock
  927. , std::size_t OverheadPercent
  928. , unsigned int Flags>
  929. class private_adaptive_node_pool_impl_ct
  930. : public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
  931. {
  932. typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> base_t;
  933. //Non-copyable
  934. private_adaptive_node_pool_impl_ct();
  935. private_adaptive_node_pool_impl_ct(const private_adaptive_node_pool_impl_ct &);
  936. private_adaptive_node_pool_impl_ct &operator=(const private_adaptive_node_pool_impl_ct &);
  937. public:
  938. typedef typename base_t::void_pointer void_pointer;
  939. typedef typename base_t::size_type size_type;
  940. typedef typename base_t::multiallocation_chain multiallocation_chain;
  941. typedef typename base_t::segment_manager_base_type segment_manager_base_type;
  942. static const typename base_t::size_type PayloadPerAllocation = base_t::PayloadPerAllocation;
  943. //align_only
  944. static const bool AlignOnly = base_t::AlignOnly;
  945. private:
  946. static const size_type MaxAlign = base_t::MaxAlign;
  947. static const size_type HdrSize = base_t::HdrSize;
  948. static const size_type HdrOffsetSize = base_t::HdrOffsetSize;
  949. static const size_type RealNodeSize = lcm_ct<NodeSize, alignment_of<void_pointer>::value>::value;
  950. typedef calculate_alignment_ct
  951. < size_type, HdrSize, PayloadPerAllocation
  952. , RealNodeSize, NodesPerBlock, HdrOffsetSize, OverheadPercent, AlignOnly> data_t;
  953. //Round the size to a power of two value.
  954. //This is the total memory size (including payload) that we want to
  955. //allocate from the general-purpose allocator
  956. static const size_type NumSubBlocks = data_t::num_subblocks;
  957. static const size_type RealNumNode = data_t::real_num_node;
  958. static const size_type RealBlockAlignment = data_t::alignment;
  959. public:
  960. //!Constructor from a segment manager. Never throws
  961. private_adaptive_node_pool_impl_ct(typename base_t::segment_manager_base_type *segment_mngr_base)
  962. //General purpose allocator
  963. : base_t(segment_mngr_base)
  964. {}
  965. //!Destructor. Deallocates all allocated blocks. Never throws
  966. ~private_adaptive_node_pool_impl_ct()
  967. { this->priv_clear(NumSubBlocks, data_t::alignment, RealNumNode); }
  968. size_type get_real_num_node() const
  969. { return RealNumNode; }
  970. //!Allocates array of count elements. Can throw
  971. void *allocate_node()
  972. {
  973. return this->priv_allocate_node
  974. (MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
  975. }
  976. //!Allocates n nodes.
  977. //!Can throw
  978. void allocate_nodes(const size_type n, multiallocation_chain &chain)
  979. {
  980. this->priv_allocate_nodes
  981. (n, chain, MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
  982. }
  983. //!Deallocates an array pointed by ptr. Never throws
  984. void deallocate_node(void *pElem)
  985. {
  986. this->priv_deallocate_node(pElem, MaxFreeBlocks, RealNumNode, NumSubBlocks, RealBlockAlignment);
  987. }
  988. //!Deallocates a linked list of nodes. Never throws
  989. void deallocate_nodes(multiallocation_chain &nodes)
  990. {
  991. this->priv_deallocate_nodes(nodes, MaxFreeBlocks, RealNumNode, NumSubBlocks, data_t::alignment);
  992. }
  993. void deallocate_free_blocks()
  994. { this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment); }
  995. //Deprecated, use deallocate_free_blocks
  996. void deallocate_free_chunks()
  997. { this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment); }
  998. };
  999. /////////////////////////////////////////////
  1000. //
  1001. // private_adaptive_node_pool_impl_rt
  1002. //
  1003. /////////////////////////////////////////////
//!Runtime-configured pool geometry: holds the values that the compile-time
//!pool derives from template parameters; they are computed in the runtime
//!pool's constructor instead.
template<class SizeType>
struct private_adaptive_node_pool_impl_rt_data
{
   typedef SizeType size_type;

   //Alignment/subblock/node counts are value-initialized to zero here and
   //filled in later by the owning pool's constructor
   private_adaptive_node_pool_impl_rt_data(size_type max_free_blocks, size_type real_node_size)
      : m_max_free_blocks(max_free_blocks), m_real_node_size(real_node_size)
      , m_real_block_alignment(), m_num_subblocks(), m_real_num_node()
   {}

   //Maximum totally-free blocks kept cached (passed to priv_deallocate_free_blocks)
   const size_type m_max_free_blocks;
   //Node size rounded up so each node can hold an aligned void_pointer
   const size_type m_real_node_size;
   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   size_type m_real_block_alignment;
   size_type m_num_subblocks;
   //This is the real number of nodes per block
   size_type m_real_num_node;
};
//!Adaptive node pool whose geometry is chosen at runtime from the
//!constructor arguments instead of template parameters.
template<class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_rt
   //data_t is listed as the first base so it is initialized before impl_t
   //in the constructor's initialization order
   : private private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type>
   , public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
{
   typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> impl_t;
   typedef private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type> data_t;

   //Non-copyable
   private_adaptive_node_pool_impl_rt();
   private_adaptive_node_pool_impl_rt(const private_adaptive_node_pool_impl_rt &);
   private_adaptive_node_pool_impl_rt &operator=(const private_adaptive_node_pool_impl_rt &);

   protected:
   typedef typename impl_t::void_pointer          void_pointer;
   typedef typename impl_t::size_type             size_type;
   typedef typename impl_t::multiallocation_chain multiallocation_chain;
   static const typename impl_t::size_type PayloadPerAllocation = impl_t::PayloadPerAllocation;

   //Flags
   //align_only
   static const bool AlignOnly = impl_t::AlignOnly;

   static const size_type HdrSize       = impl_t::HdrSize;
   static const size_type HdrOffsetSize = impl_t::HdrOffsetSize;

   public:
   //!Segment manager typedef
   typedef SegmentManagerBase segment_manager_base_type;

   //!Constructor from a segment manager. Never throws.
   //!Computes the block geometry (alignment, subblocks, nodes per block) at
   //!runtime from node_size, nodes_per_block and overhead_percent.
   private_adaptive_node_pool_impl_rt
      ( segment_manager_base_type *segment_mngr_base
      , size_type node_size
      , size_type nodes_per_block
      , size_type max_free_blocks
      , unsigned char overhead_percent
      )
      //Round the node size up so nodes can hold an aligned void_pointer
      : data_t(max_free_blocks, lcm(node_size, size_type(alignment_of<void_pointer>::value)))
      , impl_t(segment_mngr_base)
   {
      if(AlignOnly){
         //Single-chunk blocks: alignment is the power of two holding
         //the header plus all requested nodes
         this->m_real_block_alignment = upper_power_of_2(HdrSize + this->m_real_node_size*nodes_per_block);
         this->m_real_num_node = (this->m_real_block_alignment - PayloadPerAllocation - HdrSize)/this->m_real_node_size;
      }
      else{
         //Segmented blocks: search the best power-of-two geometry at runtime
         candidate_power_of_2_rt ( upper_power_of_2(HdrSize + PayloadPerAllocation + this->m_real_node_size)
                                 , this->m_real_node_size
                                 , PayloadPerAllocation
                                 , nodes_per_block
                                 , HdrSize
                                 , HdrOffsetSize
                                 , overhead_percent
                                 , this->m_real_block_alignment
                                 , this->m_num_subblocks
                                 , this->m_real_num_node);
      }
   }

   //!Destructor. Deallocates all allocated blocks. Never throws
   ~private_adaptive_node_pool_impl_rt()
   {  this->priv_clear(this->m_num_subblocks, this->m_real_block_alignment, this->m_real_num_node);  }

   //!Returns the real number of nodes carved from each block
   size_type get_real_num_node() const
   {  return this->m_real_num_node;  }

   //!Allocates a single node. Can throw
   void *allocate_node()
   {
      return this->priv_allocate_node
         (this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Allocates n nodes.
   //!Can throw
   void allocate_nodes(const size_type n, multiallocation_chain &chain)
   {
      this->priv_allocate_nodes
         (n, chain, this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Deallocates a single node pointed by pElem. Never throws
   void deallocate_node(void *pElem)
   {
      this->priv_deallocate_node(pElem, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Deallocates a linked list of nodes. Never throws
   void deallocate_nodes(multiallocation_chain &nodes)
   {
      this->priv_deallocate_nodes(nodes, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Returns every totally free block to the segment manager. Never throws
   void deallocate_free_blocks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }

   //Deprecated, use deallocate_free_blocks
   void deallocate_free_chunks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }
};
  1108. } //namespace dtl {
  1109. } //namespace container {
  1110. } //namespace boost {
  1111. #include <boost/container/detail/config_end.hpp>
  1112. #endif //#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP