1 //////////////////////////////////////////////////////////////////////////////
2 //
3 // (C) Copyright Ion Gaztanaga 2005-2013. Distributed under the Boost
4 // Software License, Version 1.0. (See accompanying file
5 // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
6 //
7 // See http://www.boost.org/libs/container for documentation.
8 //
9 //////////////////////////////////////////////////////////////////////////////
10
11 #ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
12 #define BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
13
14 #ifndef BOOST_CONFIG_HPP
15 # include <boost/config.hpp>
16 #endif
17
18 #if defined(BOOST_HAS_PRAGMA_ONCE)
19 # pragma once
20 #endif
21
22 #include <boost/container/detail/config_begin.hpp>
23 #include <boost/container/detail/workaround.hpp>
24
25 // container
26 #include <boost/container/container_fwd.hpp>
27 #include <boost/container/throw_exception.hpp>
28 // container/detail
29 #include <boost/container/detail/pool_common.hpp>
30 #include <boost/container/detail/iterator.hpp>
31 #include <boost/move/detail/iterator_to_raw_pointer.hpp>
32 #include <boost/container/detail/math_functions.hpp>
33 #include <boost/container/detail/placement_new.hpp>
34 #include <boost/container/detail/mpl.hpp>
35 #include <boost/move/detail/to_raw_pointer.hpp>
36 #include <boost/container/detail/type_traits.hpp>
37 // intrusive
38 #include <boost/intrusive/pointer_traits.hpp>
39 #include <boost/intrusive/set.hpp>
40 #include <boost/intrusive/list.hpp>
41 #include <boost/intrusive/slist.hpp>
42 // other
43 #include <boost/assert.hpp>
44 #include <boost/core/no_exceptions_support.hpp>
45 #include <cstddef>
46
47 namespace boost {
48 namespace container {
49
//Bit flags that select the adaptive pool's allocation and ordering policy.
namespace adaptive_pool_flag {

//No policy: blocks are kept in an unordered intrusive list
static const unsigned int none = 0u;
//Allocate a single aligned block (no multi-subblock layout with offset headers)
static const unsigned int align_only = 1u << 0u;
//Order blocks by their number of free nodes (see less_func)
static const unsigned int size_ordered = 1u << 1u;
//Order blocks by memory address (see less_func)
static const unsigned int address_ordered = 1u << 2u;

} //namespace adaptive_pool_flag{
58
59 namespace dtl {
60
//!Holds the distance in bytes from a subblock to the header block that
//!owns it (see priv_first_subblock_from_block, which asserts this
//!relationship). Stored at the beginning of each subblock; the header
//!block itself stores offset zero.
template<class size_type>
struct hdr_offset_holder_t
{
   //Non-explicit by design: constructed from a raw byte offset
   hdr_offset_holder_t(size_type offset = 0)
      : hdr_offset(offset)
   {}
   size_type hdr_offset;   //byte distance to the owning block header
};
69
70 template<class SizeType, unsigned int Flags>
71 struct less_func;
72
73 template<class SizeType>
74 struct less_func<SizeType, adaptive_pool_flag::none>
75 {
lessboost::container::dtl::less_func76 static bool less(SizeType, SizeType, const void *, const void *)
77 { return true; }
78 };
79
80 template<class SizeType>
81 struct less_func<SizeType, adaptive_pool_flag::size_ordered>
82 {
lessboost::container::dtl::less_func83 static bool less(SizeType ls, SizeType rs, const void *, const void *)
84 { return ls < rs; }
85 };
86
87 template<class SizeType>
88 struct less_func<SizeType, adaptive_pool_flag::address_ordered>
89 {
lessboost::container::dtl::less_func90 static bool less(SizeType, SizeType, const void *la, const void *ra)
91 { return la < ra; }
92 };
93
94 template<class SizeType>
95 struct less_func<SizeType, adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered>
96 {
lessboost::container::dtl::less_func97 static bool less(SizeType ls, SizeType rs, const void *la, const void *ra)
98 { return (ls < rs) || ((ls == rs) && (la < ra)); }
99 };
100
101 template<class VoidPointer, class SizeType, unsigned OrderFlags>
102 struct block_container_traits
103 {
104 typedef typename bi::make_set_base_hook
105 < bi::void_pointer<VoidPointer>
106 , bi::optimize_size<true>
107 , bi::link_mode<bi::normal_link> >::type hook_t;
108
109 template<class T>
110 struct container
111 {
112 typedef typename bi::make_multiset
113 <T, bi::base_hook<hook_t>, bi::size_type<SizeType> >::type type;
114 };
115
116 template<class Container>
reinsert_was_usedboost::container::dtl::block_container_traits117 static void reinsert_was_used(Container &container, typename Container::reference v, bool)
118 {
119 typedef typename Container::const_iterator const_block_iterator;
120 typedef typename Container::iterator block_iterator;
121 typedef typename Container::value_compare value_compare;
122
123 const block_iterator this_block(Container::s_iterator_to(v));
124 const const_block_iterator cendit(container.cend());
125 block_iterator next_block(this_block);
126
127 if(++next_block != cendit && value_compare()(*next_block, v)){
128 const_block_iterator next2_block(next_block);
129 //Test if v should be swapped with next (optimization)
130 if(++next2_block == cendit || !value_compare()(*next2_block, v)){
131 v.swap_nodes(*next_block);
132 BOOST_ASSERT(++next_block == this_block);
133 }
134 else{
135 container.erase(this_block);
136 container.insert(v);
137 }
138 }
139 }
140
141 template<class Container>
insert_was_emptyboost::container::dtl::block_container_traits142 static void insert_was_empty(Container &container, typename Container::value_type &v, bool)
143 {
144 container.insert(v);
145 }
146
147 template<class Container>
erase_firstboost::container::dtl::block_container_traits148 static void erase_first(Container &container)
149 {
150 container.erase(container.cbegin());
151 }
152
153 template<class Container>
erase_lastboost::container::dtl::block_container_traits154 static void erase_last(Container &container)
155 {
156 container.erase(--container.cend());
157 }
158 };
159
160 template<class VoidPointer, class SizeType>
161 struct block_container_traits<VoidPointer, SizeType, 0u>
162 {
163 typedef typename bi::make_list_base_hook
164 < bi::void_pointer<VoidPointer>
165 , bi::link_mode<bi::normal_link> >::type hook_t;
166
167 template<class T>
168 struct container
169 {
170 typedef typename bi::make_list
171 <T, bi::base_hook<hook_t>, bi::size_type<SizeType>, bi::constant_time_size<false> >::type type;
172 };
173
174 template<class Container>
reinsert_was_usedboost::container::dtl::block_container_traits175 static void reinsert_was_used(Container &container, typename Container::value_type &v, bool is_full)
176 {
177 if(is_full){
178 container.erase(Container::s_iterator_to(v));
179 container.push_back(v);
180 }
181 }
182
183 template<class Container>
insert_was_emptyboost::container::dtl::block_container_traits184 static void insert_was_empty(Container &container, typename Container::value_type &v, bool is_full)
185 {
186 if(is_full){
187 container.push_back(v);
188 }
189 else{
190 container.push_front(v);
191 }
192 }
193
194 template<class Container>
erase_firstboost::container::dtl::block_container_traits195 static void erase_first(Container &container)
196 {
197 container.pop_front();
198 }
199
200 template<class Container>
erase_lastboost::container::dtl::block_container_traits201 static void erase_last(Container &container)
202 {
203 container.pop_back();
204 }
205 };
206
207 /////////////////////////////
208 //
209 // adaptive_pool_types
210 //
211 /////////////////////////////
212 template<class MultiallocationChain, class VoidPointer, class SizeType, unsigned int Flags>
213 struct adaptive_pool_types
214 {
215 typedef VoidPointer void_pointer;
216 static const unsigned ordered = (Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered));
217 typedef block_container_traits<VoidPointer, SizeType, ordered> block_container_traits_t;
218 typedef typename block_container_traits_t::hook_t hook_t;
219 typedef hdr_offset_holder_t<SizeType> hdr_offset_holder;
220 static const unsigned int order_flags = Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered);
221 typedef MultiallocationChain free_nodes_t;
222
223 struct block_info_t
224 : public hdr_offset_holder,
225 public hook_t
226 {
227 //An intrusive list of free node from this block
228 free_nodes_t free_nodes;
operator <boost::container::dtl::adaptive_pool_types229 friend bool operator <(const block_info_t &l, const block_info_t &r)
230 {
231 return less_func<SizeType, order_flags>::
232 less(l.free_nodes.size(), r.free_nodes.size(), &l , &r);
233 }
234
operator ==boost::container::dtl::adaptive_pool_types235 friend bool operator ==(const block_info_t &l, const block_info_t &r)
236 { return &l == &r; }
237 };
238 typedef typename block_container_traits_t:: template container<block_info_t>::type block_container_t;
239 };
240
241
242 /////////////////////////////////////////////
243 //
244 // candidate_power_of_2_ct
245 //
246 /////////////////////////////////////////////
//Compile-time evaluation of one candidate power-of-two block alignment:
//computes how many subblocks and nodes a block of this alignment holds
//and whether the wasted space stays below overhead_percent.
template< std::size_t alignment
, std::size_t real_node_size
, std::size_t payload_per_allocation
, std::size_t min_elements_per_block
, std::size_t hdr_size
, std::size_t hdr_offset_size
, std::size_t overhead_percent>
struct candidate_power_of_2_ct_helper
{
//Nodes that fit when the header subblock is the only subblock
static const std::size_t hdr_subblock_elements_alone = (alignment - hdr_size - payload_per_allocation)/real_node_size;
//Nodes that fit in the header subblock when more subblocks follow
static const std::size_t hdr_subblock_elements_first = (alignment - hdr_size - payload_per_allocation)/real_node_size;
//Nodes per middle subblock (only an offset marker is stored there)
static const std::size_t elements_per_b_subblock_mid = (alignment - hdr_offset_size)/real_node_size;
//Nodes in the final subblock (offset marker plus allocator payload)
static const std::size_t elements_per_b_subblock_end = (alignment - hdr_offset_size - payload_per_allocation)/real_node_size;
//Minimum number of non-header subblocks needed to reach min_elements_per_block
static const std::size_t num_b_subblock =
hdr_subblock_elements_alone >= min_elements_per_block
? 0
: ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
? 1
: 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
)
;

//Middle subblocks (all non-header subblocks except the final one)
static const std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;

//Total node capacity of the whole block, per the layout chosen above
static const std::size_t total_nodes = (num_b_subblock == 0)
? hdr_subblock_elements_alone
: ( (num_b_subblock == 1)
? (hdr_subblock_elements_first + elements_per_b_subblock_end)
: (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
)
;
static const std::size_t total_data = total_nodes*real_node_size;
static const std::size_t total_size = alignment*(num_b_subblock+1);
//True when wasted space (headers, padding, payload) is under the limit
static const bool overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
};
282
//Recursive metafunction: evaluates initial_alignment with the helper
//and, while the overhead test fails, recurses with the next power of
//two (the alignment is doubled by the shift below). Recursion ends in
//the Loop == false specialization once the overhead is satisfied.
template< std::size_t initial_alignment
, std::size_t real_node_size
, std::size_t payload_per_allocation
, std::size_t min_elements_per_block
, std::size_t hdr_size
, std::size_t hdr_offset_size
, std::size_t overhead_percent
, bool Loop = true>
struct candidate_power_of_2_ct
{
typedef candidate_power_of_2_ct_helper
< initial_alignment
, real_node_size
, payload_per_allocation
, min_elements_per_block
, hdr_size
, hdr_offset_size
, overhead_percent> helper_t;

//Same alignment if the overhead is satisfied, doubled otherwise
static const std::size_t candidate_power_of_2 = initial_alignment << std::size_t(!helper_t::overhead_satisfied);

//Recurse; Loop becomes false exactly when overhead_satisfied is true,
//selecting the terminating specialization below
typedef typename candidate_power_of_2_ct
< candidate_power_of_2
, real_node_size
, payload_per_allocation
, min_elements_per_block
, hdr_size
, hdr_offset_size
, overhead_percent
, !helper_t::overhead_satisfied
>::type type;

//Results forwarded from the terminating instantiation
static const std::size_t alignment = type::alignment;
static const std::size_t num_subblocks = type::num_subblocks;
static const std::size_t real_num_node = type::real_num_node;
};
319
//Terminating specialization (Loop == false): the candidate alignment
//passed the overhead test, so publish the final results computed by
//the helper for this alignment.
template< std::size_t initial_alignment
, std::size_t real_node_size
, std::size_t payload_per_allocation
, std::size_t min_elements_per_block
, std::size_t hdr_size
, std::size_t hdr_offset_size
, std::size_t overhead_percent
>
struct candidate_power_of_2_ct
< initial_alignment
, real_node_size
, payload_per_allocation
, min_elements_per_block
, hdr_size
, hdr_offset_size
, overhead_percent
, false>
{
//Self-referential typedef: recursion in the primary template stops here
typedef candidate_power_of_2_ct
< initial_alignment
, real_node_size
, payload_per_allocation
, min_elements_per_block
, hdr_size
, hdr_offset_size
, overhead_percent
, false> type;

typedef candidate_power_of_2_ct_helper
< initial_alignment
, real_node_size
, payload_per_allocation
, min_elements_per_block
, hdr_size
, hdr_offset_size
, overhead_percent> helper_t;

//Final results: chosen alignment, subblocks per block, nodes per block
static const std::size_t alignment = initial_alignment;
static const std::size_t num_subblocks = helper_t::num_b_subblock+1;
static const std::size_t real_num_node = helper_t::total_nodes;
};
361
362 /////////////////////////////////////////////
363 //
364 // candidate_power_of_2_rt
365 //
366 /////////////////////////////////////////////
//!Runtime version of candidate_power_of_2_ct: starting from
//!initial_alignment (assumed a power of two), doubles the candidate
//!block alignment until the wasted space (headers, padding, payload)
//!drops below overhead_percent. Writes the chosen alignment, the
//!number of subblocks per block and the total node capacity through
//!the reference out-parameters.
inline void candidate_power_of_2_rt ( std::size_t initial_alignment
                                    , std::size_t real_node_size
                                    , std::size_t payload_per_allocation
                                    , std::size_t min_elements_per_block
                                    , std::size_t hdr_size
                                    , std::size_t hdr_offset_size
                                    , std::size_t overhead_percent
                                    , std::size_t &alignment
                                    , std::size_t &num_subblocks
                                    , std::size_t &real_num_node)
{
   bool overhead_satisfied = false;
   std::size_t num_b_subblock = 0;
   std::size_t total_nodes = 0;

   while(!overhead_satisfied)
   {
      //Node capacities of each subblock kind (see the _ct helper for the
      //same computations done at compile time)
      std::size_t hdr_subblock_elements_alone = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
      std::size_t hdr_subblock_elements_first = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
      std::size_t elements_per_b_subblock_mid = (initial_alignment - hdr_offset_size)/real_node_size;
      std::size_t elements_per_b_subblock_end = (initial_alignment - hdr_offset_size - payload_per_allocation)/real_node_size;

      //Minimum non-header subblocks needed to reach min_elements_per_block
      num_b_subblock =
         hdr_subblock_elements_alone >= min_elements_per_block
         ? 0
         : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
            ? 1
            : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
           )
         ;

      std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;

      //Total node capacity of the block for this candidate alignment
      total_nodes = (num_b_subblock == 0)
         ? hdr_subblock_elements_alone
         : ( (num_b_subblock == 1)
            ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
            : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
           )
         ;
      std::size_t total_data = total_nodes*real_node_size;
      std::size_t total_size = initial_alignment*(num_b_subblock+1);
      overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
      //Try the next power of two if the overhead is still too high
      initial_alignment = initial_alignment << std::size_t(!overhead_satisfied);
   }
   alignment     = initial_alignment;
   num_subblocks = num_b_subblock+1;
   real_num_node = total_nodes;
}
416
417 /////////////////////////////////////////////
418 //
419 // private_adaptive_node_pool_impl_common
420 //
421 /////////////////////////////////////////////
//!Common implementation of the adaptive node pool: manages a container
//!of node blocks (ordered or not, depending on Flags) and the count of
//!totally free blocks. (Class continues beyond this declaration area.)
template< class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_common
{
public:
//!Segment manager typedef
typedef SegmentManagerBase segment_manager_base_type;
typedef typename SegmentManagerBase::multiallocation_chain multiallocation_chain;
typedef typename SegmentManagerBase::size_type size_type;
//Flags
//align_only
static const bool AlignOnly = (Flags & adaptive_pool_flag::align_only) != 0;
//Tag types used to dispatch to the align-only / subblock code paths
typedef bool_<AlignOnly> IsAlignOnly;
typedef true_ AlignOnlyTrue;
typedef false_ AlignOnlyFalse;

typedef typename SegmentManagerBase::void_pointer void_pointer;
//Per-allocation bookkeeping overhead charged by the segment manager
static const typename SegmentManagerBase::
size_type PayloadPerAllocation = SegmentManagerBase::PayloadPerAllocation;

typedef typename boost::intrusive::pointer_traits
<void_pointer>::template rebind_pointer<segment_manager_base_type>::type segment_mngr_base_ptr_t;

protected:
typedef adaptive_pool_types
<multiallocation_chain, void_pointer, size_type, Flags> adaptive_pool_types_t;
typedef typename adaptive_pool_types_t::free_nodes_t free_nodes_t;
typedef typename adaptive_pool_types_t::block_info_t block_info_t;
typedef typename adaptive_pool_types_t::block_container_t block_container_t;
typedef typename adaptive_pool_types_t::block_container_traits_t block_container_traits_t;
typedef typename block_container_t::iterator block_iterator;
typedef typename block_container_t::const_iterator const_block_iterator;
typedef typename adaptive_pool_types_t::hdr_offset_holder hdr_offset_holder;
typedef private_adaptive_node_pool_impl_common this_type;

//Header sizes rounded up to the alignment of void_pointer
static const size_type MaxAlign = alignment_of<void_pointer>::value;
static const size_type HdrSize = ((sizeof(block_info_t)-1)/MaxAlign+1)*MaxAlign;
static const size_type HdrOffsetSize = ((sizeof(hdr_offset_holder)-1)/MaxAlign+1)*MaxAlign;

segment_mngr_base_ptr_t mp_segment_mngr_base; //Segment manager
block_container_t m_block_container; //Intrusive block list
size_type m_totally_free_blocks; //Free blocks

class block_destroyer;
friend class block_destroyer;
466
467 class block_destroyer
468 {
469 public:
block_destroyer(const this_type * impl,multiallocation_chain & chain,const size_type num_subblocks,const size_type real_block_alignment,const size_type real_num_node)470 block_destroyer(const this_type *impl, multiallocation_chain &chain, const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
471 : mp_impl(impl), m_chain(chain), m_num_subblocks(num_subblocks), m_real_block_alignment(real_block_alignment), m_real_num_node(real_num_node)
472 {}
473
operator ()(typename block_container_t::pointer to_deallocate)474 void operator()(typename block_container_t::pointer to_deallocate)
475 { return this->do_destroy(to_deallocate, IsAlignOnly()); }
476
477 private:
do_destroy(typename block_container_t::pointer to_deallocate,AlignOnlyTrue)478 void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyTrue)
479 {
480 BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
481 m_chain.push_back(to_deallocate);
482 }
483
do_destroy(typename block_container_t::pointer to_deallocate,AlignOnlyFalse)484 void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyFalse)
485 {
486 BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
487 BOOST_ASSERT(0 == to_deallocate->hdr_offset);
488 hdr_offset_holder *hdr_off_holder =
489 mp_impl->priv_first_subblock_from_block(boost::movelib::to_raw_pointer(to_deallocate), m_num_subblocks, m_real_block_alignment);
490 m_chain.push_back(hdr_off_holder);
491 }
492
493 const this_type *mp_impl;
494 multiallocation_chain &m_chain;
495 const size_type m_num_subblocks;
496 const size_type m_real_block_alignment;
497 const size_type m_real_num_node;
498 };
499
500 //This macro will activate invariant checking. Slow, but helpful for debugging the code.
501 //#define BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
priv_invariants(const size_type real_num_node,const size_type num_subblocks,const size_type real_block_alignment) const502 void priv_invariants(const size_type real_num_node, const size_type num_subblocks, const size_type real_block_alignment) const
503 {
504 (void)real_num_node; (void)num_subblocks; (void)real_block_alignment;
505 #ifdef BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
506 //Check that the total totally free blocks are correct
507 BOOST_ASSERT(m_block_container.size() >= m_totally_free_blocks);
508
509 const const_block_iterator itend(m_block_container.cend());
510 const const_block_iterator itbeg(m_block_container.cbegin());
511
512 { //Try to do checks in a single iteration
513 const_block_iterator it(itbeg);
514 size_type total_free_nodes = 0;
515 size_type total_free_blocks = 0u;
516 for(; it != itend; ++it){
517 if(it != itbeg){
518 //Check order invariant
519 const_block_iterator prev(it);
520 --prev;
521 BOOST_ASSERT(!(m_block_container.key_comp()(*it, *prev)));
522 (void)prev; (void)it;
523 }
524
525 //free_nodes invariant
526 const size_type free_nodes = it->free_nodes.size();
527 BOOST_ASSERT(free_nodes <= real_num_node);
528 BOOST_ASSERT(free_nodes != 0);
529
530 //Acummulate total_free_nodes and total_free_blocks
531 total_free_nodes += free_nodes;
532 total_free_blocks += it->free_nodes.size() == real_num_node;
533
534 if (!AlignOnly) {
535 //Check that header offsets are correct
536 hdr_offset_holder *hdr_off_holder = this->priv_first_subblock_from_block(const_cast<block_info_t *>(&*it), num_subblocks, real_block_alignment);
537 for (size_type i = 0, max = num_subblocks; i < max; ++i) {
538 const size_type offset = size_type(reinterpret_cast<char*>(const_cast<block_info_t *>(&*it)) - reinterpret_cast<char*>(hdr_off_holder));
539 (void)offset;
540 BOOST_ASSERT(hdr_off_holder->hdr_offset == offset);
541 BOOST_ASSERT(0 == (reinterpret_cast<std::size_t>(hdr_off_holder) & (real_block_alignment - 1)));
542 BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
543 hdr_off_holder = reinterpret_cast<hdr_offset_holder *>(reinterpret_cast<char*>(hdr_off_holder) + real_block_alignment);
544 }
545 }
546 }
547 BOOST_ASSERT(total_free_blocks == m_totally_free_blocks);
548 BOOST_ASSERT(total_free_nodes >= m_totally_free_blocks*real_num_node);
549 }
550 #endif
551 }
552
priv_deallocate_free_blocks(const size_type max_free_blocks,const size_type real_num_node,const size_type num_subblocks,const size_type real_block_alignment)553 void priv_deallocate_free_blocks( const size_type max_free_blocks, const size_type real_num_node
554 , const size_type num_subblocks, const size_type real_block_alignment)
555 { //Trampoline function to ease inlining
556 if(m_totally_free_blocks > max_free_blocks){
557 this->priv_deallocate_free_blocks_impl(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
558 }
559 }
560
priv_first_subblock_from_block(block_info_t * block,const size_type num_subblocks,const size_type real_block_alignment) const561 hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment) const
562 { return this->priv_first_subblock_from_block(block, num_subblocks, real_block_alignment, IsAlignOnly()); }
563
priv_first_subblock_from_block(block_info_t * block,const size_type num_subblocks,const size_type real_block_alignment,AlignOnlyFalse) const564 hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyFalse) const
565 {
566 hdr_offset_holder *const hdr_off_holder = reinterpret_cast<hdr_offset_holder*>
567 (reinterpret_cast<char*>(block) - (num_subblocks-1)*real_block_alignment);
568 BOOST_ASSERT(hdr_off_holder->hdr_offset == size_type(reinterpret_cast<char*>(block) - reinterpret_cast<char*>(hdr_off_holder)));
569 BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
570 BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
571 return hdr_off_holder;
572 }
573
priv_first_subblock_from_block(block_info_t * block,const size_type num_subblocks,const size_type real_block_alignment,AlignOnlyTrue) const574 hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyTrue) const
575 {
576 (void)num_subblocks; (void)real_block_alignment;
577 return reinterpret_cast<hdr_offset_holder*>(block);
578 }
579
priv_deallocate_free_blocks_impl(const size_type max_free_blocks,const size_type real_num_node,const size_type num_subblocks,const size_type real_block_alignment)580 void priv_deallocate_free_blocks_impl( const size_type max_free_blocks, const size_type real_num_node
581 , const size_type num_subblocks, const size_type real_block_alignment)
582 {
583 this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
584 //Now check if we've reached the free nodes limit
585 //and check if we have free blocks. If so, deallocate as much
586 //as we can to stay below the limit
587 multiallocation_chain chain;
588 {
589 if(Flags & adaptive_pool_flag::size_ordered){
590 const_block_iterator it = m_block_container.cend();
591 --it;
592 size_type totally_free_blocks = m_totally_free_blocks;
593
594 for( ; totally_free_blocks > max_free_blocks; --totally_free_blocks){
595 BOOST_ASSERT(it->free_nodes.size() == real_num_node);
596 void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
597 --it;
598 block_container_traits_t::erase_last(m_block_container);
599 chain.push_front(void_pointer(addr));
600 }
601 }
602 else{
603 const_block_iterator it = m_block_container.cend();
604 size_type totally_free_blocks = m_totally_free_blocks;
605
606 while(totally_free_blocks > max_free_blocks){
607 --it;
608 if(it->free_nodes.size() == real_num_node){
609 void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
610 it = m_block_container.erase(it);
611 chain.push_front(void_pointer(addr));
612 --totally_free_blocks;
613 }
614 }
615 }
616 BOOST_ASSERT((m_totally_free_blocks - max_free_blocks) == chain.size());
617 m_totally_free_blocks = max_free_blocks;
618 }
619 this->mp_segment_mngr_base->deallocate_many(chain);
620 this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
621 }
622
priv_fill_chain_remaining_to_block(multiallocation_chain & chain,size_type target_elem_in_chain,block_info_t & c_info,char * mem_address,size_type max_node_in_mem,const size_type real_node_size)623 void priv_fill_chain_remaining_to_block
624 ( multiallocation_chain &chain, size_type target_elem_in_chain, block_info_t &c_info
625 , char *mem_address, size_type max_node_in_mem
626 , const size_type real_node_size)
627 {
628 BOOST_ASSERT(chain.size() <= target_elem_in_chain);
629
630 //First add all possible nodes to the chain
631 const size_type left = target_elem_in_chain - chain.size();
632 const size_type add_to_chain = (max_node_in_mem < left) ? max_node_in_mem : left;
633 char *free_mem_address = static_cast<char *>(boost::movelib::to_raw_pointer
634 (chain.incorporate_after(chain.last(), void_pointer(mem_address), real_node_size, add_to_chain)));
635 //Now store remaining nodes in the free list
636 if(const size_type free = max_node_in_mem - add_to_chain){
637 free_nodes_t & free_nodes = c_info.free_nodes;
638 free_nodes.incorporate_after(free_nodes.last(), void_pointer(free_mem_address), real_node_size, free);
639 }
640 }
641
642 //!Allocates a several blocks of nodes. Can throw
priv_append_from_new_blocks(size_type min_elements,multiallocation_chain & chain,const size_type max_free_blocks,const size_type real_block_alignment,const size_type real_node_size,const size_type real_num_node,const size_type num_subblocks,AlignOnlyTrue)643 void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
644 , const size_type max_free_blocks
645 , const size_type real_block_alignment, const size_type real_node_size
646 , const size_type real_num_node, const size_type num_subblocks
647 , AlignOnlyTrue)
648 {
649 (void)num_subblocks;
650 BOOST_ASSERT(m_block_container.empty());
651 BOOST_ASSERT(min_elements > 0);
652 const size_type n = (min_elements - 1)/real_num_node + 1;
653 const size_type real_block_size = real_block_alignment - PayloadPerAllocation;
654 const size_type target_elem_in_chain = chain.size() + min_elements;
655 for(size_type i = 0; i != n; ++i){
656 //We allocate a new NodeBlock and put it the last
657 //element of the tree
658 char *mem_address = static_cast<char*>
659 (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
660 if(!mem_address){
661 //In case of error, free memory deallocating all nodes (the new ones allocated
662 //in this function plus previously stored nodes in chain).
663 this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
664 throw_bad_alloc();
665 }
666 block_info_t &c_info = *new(mem_address, boost_container_new_t())block_info_t();
667 mem_address += HdrSize;
668 this->priv_fill_chain_remaining_to_block(chain, target_elem_in_chain, c_info, mem_address, real_num_node, real_node_size);
669 const size_type free_nodes = c_info.free_nodes.size();
670 if(free_nodes){
671 const bool is_full = free_nodes == real_num_node;
672 BOOST_ASSERT(free_nodes < real_num_node);
673 m_totally_free_blocks += static_cast<size_type>(is_full);
674 block_container_traits_t::insert_was_empty(m_block_container, c_info, is_full);
675 }
676 }
677 }
678
priv_append_from_new_blocks(size_type min_elements,multiallocation_chain & chain,const size_type max_free_blocks,const size_type real_block_alignment,const size_type real_node_size,const size_type real_num_node,const size_type num_subblocks,AlignOnlyFalse)679 void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
680 , const size_type max_free_blocks
681 , const size_type real_block_alignment, const size_type real_node_size
682 , const size_type real_num_node, const size_type num_subblocks
683 , AlignOnlyFalse)
684 {
685 BOOST_ASSERT(m_block_container.empty());
686 BOOST_ASSERT(min_elements > 0);
687 const size_type n = (min_elements - 1)/real_num_node + 1;
688 const size_type real_block_size = real_block_alignment*num_subblocks - PayloadPerAllocation;
689 const size_type elements_per_subblock_mid = (real_block_alignment - HdrOffsetSize)/real_node_size;
690 const size_type elements_per_subblock_end = (real_block_alignment - HdrOffsetSize - PayloadPerAllocation) / real_node_size;
691 const size_type hdr_subblock_elements = (real_block_alignment - HdrSize - PayloadPerAllocation)/real_node_size;
692 const size_type target_elem_in_chain = chain.size() + min_elements;
693
694 for(size_type i = 0; i != n; ++i){
695 //We allocate a new NodeBlock and put it the last
696 //element of the tree
697 char *mem_address = static_cast<char*>
698 (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
699 if(!mem_address){
700 //In case of error, free memory deallocating all nodes (the new ones allocated
701 //in this function plus previously stored nodes in chain).
702 this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
703 throw_bad_alloc();
704 }
705 //First initialize header information on the last subblock
706 char *hdr_addr = mem_address + real_block_alignment*(num_subblocks-1);
707 block_info_t &c_info = *new(hdr_addr, boost_container_new_t())block_info_t();
708 //Some structural checks
709 BOOST_ASSERT(static_cast<void*>(&static_cast<hdr_offset_holder&>(c_info).hdr_offset) ==
710 static_cast<void*>(&c_info)); (void)c_info;
711 for( size_type subblock = 0, maxsubblock = num_subblocks - 1
712 ; subblock < maxsubblock
713 ; ++subblock, mem_address += real_block_alignment){
714 //Initialize header offset mark
715 new(mem_address, boost_container_new_t()) hdr_offset_holder(size_type(hdr_addr - mem_address));
716 const size_type elements_per_subblock = (subblock != (maxsubblock - 1)) ? elements_per_subblock_mid : elements_per_subblock_end;
717 this->priv_fill_chain_remaining_to_block
718 (chain, target_elem_in_chain, c_info, mem_address + HdrOffsetSize, elements_per_subblock, real_node_size);
719 }
720 this->priv_fill_chain_remaining_to_block
721 (chain, target_elem_in_chain, c_info, hdr_addr + HdrSize, hdr_subblock_elements, real_node_size);
722 m_totally_free_blocks += static_cast<size_type>(c_info.free_nodes.size() == real_num_node);
723 if (c_info.free_nodes.size())
724 m_block_container.push_front(c_info);
725 }
726 }
727
728 //!Allocates array of count elements. Can throw
priv_allocate_node(const size_type max_free_blocks,const size_type real_block_alignment,const size_type real_node_size,const size_type real_num_node,const size_type num_subblocks)729 void *priv_allocate_node( const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
730 , const size_type real_num_node, const size_type num_subblocks)
731 {
732 this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
733 //If there are no free nodes we allocate a new block
734 if(!m_block_container.empty()){
735 //We take the first free node the multiset can't be empty
736 free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
737 BOOST_ASSERT(!free_nodes.empty());
738 const size_type free_nodes_count = free_nodes.size();
739 void *first_node = boost::movelib::to_raw_pointer(free_nodes.pop_front());
740 if(free_nodes.empty()){
741 block_container_traits_t::erase_first(m_block_container);
742 }
743 m_totally_free_blocks -= static_cast<size_type>(free_nodes_count == real_num_node);
744 this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
745 return first_node;
746 }
747 else{
748 multiallocation_chain chain;
749 this->priv_append_from_new_blocks
750 (1, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
751 void *node = boost::movelib::to_raw_pointer(chain.pop_front());
752 this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
753 return node;
754 }
755 }
756
   //!Allocates n nodes pushing them to "chain". On failure, every node
   //!already obtained is returned to the pool and the exception is rethrown.
   void priv_allocate_nodes( const size_type n, multiallocation_chain &chain
                           , const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                           , const size_type real_num_node, const size_type num_subblocks)
   {
      size_type i = 0;
      BOOST_TRY{
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         while(i != n){
            //If there are no free nodes we allocate all needed blocks
            if (m_block_container.empty()){
               this->priv_append_from_new_blocks
                  (n - i, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
               //After the grow, at most one (partially consumed) block may remain stored
               BOOST_ASSERT(m_block_container.empty() || (++m_block_container.cbegin() == m_block_container.cend()));
               BOOST_ASSERT(chain.size() == n);
               break;
            }
            free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
            const size_type free_nodes_count_before = free_nodes.size();
            //If nodes are consumed from a totally free block, it stops being one
            m_totally_free_blocks -= static_cast<size_type>(free_nodes_count_before == real_num_node);
            const size_type num_left = n-i;
            //Take as many nodes as still needed, but no more than this block can offer
            const size_type num_elems = (num_left < free_nodes_count_before) ? num_left : free_nodes_count_before;
            typedef typename free_nodes_t::iterator free_nodes_iterator;

            if(num_left < free_nodes_count_before){
               //Only part of the block's free list is needed: walk to the last
               //node to take, unlink the run and splice it into "chain"
               const free_nodes_iterator it_bbeg(free_nodes.before_begin());
               free_nodes_iterator it_bend(it_bbeg);
               for(size_type j = 0; j != num_elems; ++j){
                  ++it_bend;
               }
               free_nodes_iterator it_end = it_bend; ++it_end;
               free_nodes_iterator it_beg = it_bbeg; ++it_beg;
               free_nodes.erase_after(it_bbeg, it_end, num_elems);
               chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
               //chain.splice_after(chain.last(), free_nodes, it_bbeg, it_bend, num_elems);
               BOOST_ASSERT(!free_nodes.empty());
            }
            else{
               //The whole free list is consumed, so the (now nodeless)
               //block must leave the container
               const free_nodes_iterator it_beg(free_nodes.begin()), it_bend(free_nodes.last());
               free_nodes.clear();
               chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
               block_container_traits_t::erase_first(m_block_container);
            }
            i += num_elems;
         }
      }
      BOOST_CATCH(...){
         //Roll back: return every node taken so far to the pool and rethrow
         this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         BOOST_RETHROW
      }
      BOOST_CATCH_END
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
810
   //!Deallocates a single node pointed by pElem. Never throws
   void priv_deallocate_node( void *pElem
                            , const size_type max_free_blocks, const size_type real_num_node
                            , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //Locate the block header that owns this node
      block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
      const size_type prev_free_nodes = block_info.free_nodes.size();
      BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);

      //We put the node back at the end of the block's free node list
      block_info.free_nodes.push_back(void_pointer(pElem));

      //Reposition the block in the container (a previously empty block
      //needs a fresh insertion) and trim the excess of totally free blocks
      this->priv_reinsert_block(block_info, prev_free_nodes == 0, real_num_node);
      this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
829
   //!Deallocates the linked list of nodes in "nodes". Never throws.
   //!Consecutive nodes belonging to the same block are spliced back into
   //!that block's free list in a single operation.
   void priv_deallocate_nodes( multiallocation_chain &nodes
                             , const size_type max_free_blocks, const size_type real_num_node
                             , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //To take advantage of node locality, wait until two
      //nodes belong to different blocks. Only then reinsert
      //the block of the first node in the block tree.
      //Cache of the previous block
      block_info_t *prev_block_info = 0;

      //If block was empty before this call, it's not already
      //inserted in the block tree.
      bool prev_block_was_empty = false;
      typedef typename free_nodes_t::iterator free_nodes_iterator;
      {
         //"itbf" trails one node behind "itf", so when a same-block run
         //ends we know both its last node (itbf) and the next run's first (itf)
         const free_nodes_iterator itbb(nodes.before_begin()), ite(nodes.end());
         free_nodes_iterator itf(nodes.begin()), itbf(itbb);
         //Starts at size_type(-1) so the first increment leaves it at zero
         size_type splice_node_count = size_type(-1);
         while(itf != ite){
            void *pElem = boost::movelib::to_raw_pointer(boost::movelib::iterator_to_raw_pointer(itf));
            block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
            BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);
            ++splice_node_count;

            //If block change is detected calculate the cached block position in the tree
            if(&block_info != prev_block_info){
               if(prev_block_info){ //Make sure we skip the initial "dummy" cache
                  //Unlink the finished run (from the front of "nodes" up to itbf)
                  //and splice it into the previous block's free list
                  free_nodes_iterator it(itbb); ++it;
                  nodes.erase_after(itbb, itf, splice_node_count);
                  prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*it, &*itbf, splice_node_count);
                  this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
                  splice_node_count = 0;
               }
               //Update cache with new data
               prev_block_was_empty = block_info.free_nodes.empty();
               prev_block_info = &block_info;
            }
            itbf = itf;
            ++itf;
         }
      }
      if(prev_block_info){
         //The loop reinserts all blocks except the last one:
         //splice the remaining run, reinsert the last block and trim
         //the excess of totally free blocks
         const free_nodes_iterator itfirst(nodes.begin()), itlast(nodes.last());
         const size_type splice_node_count = nodes.size();
         nodes.clear();
         prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*itfirst, &*itlast, splice_node_count);
         this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
         this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      }
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
883
priv_reinsert_block(block_info_t & prev_block_info,const bool prev_block_was_empty,const size_type real_num_node)884 void priv_reinsert_block(block_info_t &prev_block_info, const bool prev_block_was_empty, const size_type real_num_node)
885 {
886 //Cache the free nodes from the block
887 const size_type this_block_free_nodes = prev_block_info.free_nodes.size();
888 const bool is_full = this_block_free_nodes == real_num_node;
889
890 //Update free block count
891 m_totally_free_blocks += static_cast<size_type>(is_full);
892 if(prev_block_was_empty){
893 block_container_traits_t::insert_was_empty(m_block_container, prev_block_info, is_full);
894 }
895 else{
896 block_container_traits_t::reinsert_was_used(m_block_container, prev_block_info, is_full);
897 }
898 }
899
priv_block_from_node(void * node,const size_type real_block_alignment,AlignOnlyFalse) const900 block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyFalse) const
901 {
902 hdr_offset_holder *hdr_off_holder =
903 reinterpret_cast<hdr_offset_holder*>((std::size_t)node & size_type(~(real_block_alignment - 1)));
904 BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
905 BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
906 block_info_t *block = reinterpret_cast<block_info_t *>
907 (reinterpret_cast<char*>(hdr_off_holder) + hdr_off_holder->hdr_offset);
908 BOOST_ASSERT(block->hdr_offset == 0);
909 return block;
910 }
911
priv_block_from_node(void * node,const size_type real_block_alignment,AlignOnlyTrue) const912 block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyTrue) const
913 {
914 return (block_info_t *)((std::size_t)node & std::size_t(~(real_block_alignment - 1)));
915 }
916
priv_block_from_node(void * node,const size_type real_block_alignment) const917 block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment) const
918 { return this->priv_block_from_node(node, real_block_alignment, IsAlignOnly()); }
919
920 //!Deallocates all used memory. Never throws
priv_clear(const size_type num_subblocks,const size_type real_block_alignment,const size_type real_num_node)921 void priv_clear(const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
922 {
923 #ifndef NDEBUG
924 block_iterator it = m_block_container.begin();
925 block_iterator itend = m_block_container.end();
926 size_type n_free_nodes = 0;
927 for(; it != itend; ++it){
928 //Check for memory leak
929 BOOST_ASSERT(it->free_nodes.size() == real_num_node);
930 ++n_free_nodes;
931 }
932 BOOST_ASSERT(n_free_nodes == m_totally_free_blocks);
933 #endif
934 //Check for memory leaks
935 this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
936 multiallocation_chain chain;
937 m_block_container.clear_and_dispose(block_destroyer(this, chain, num_subblocks, real_block_alignment, real_num_node));
938 this->mp_segment_mngr_base->deallocate_many(chain);
939 m_totally_free_blocks = 0;
940 this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
941 }
942
943 public:
   //!Constructor from a segment manager pointer. Never throws.
   //!The pool starts with no blocks and no free nodes.
   private_adaptive_node_pool_impl_common(segment_manager_base_type *segment_mngr_base)
      //General purpose allocator
      : mp_segment_mngr_base(segment_mngr_base)
      , m_block_container()
      , m_totally_free_blocks(0)
   {}
950
num_free_nodes()951 size_type num_free_nodes()
952 {
953 typedef typename block_container_t::const_iterator citerator;
954 size_type count = 0;
955 citerator it (m_block_container.begin()), itend(m_block_container.end());
956 for(; it != itend; ++it){
957 count += it->free_nodes.size();
958 }
959 return count;
960 }
961
swap(private_adaptive_node_pool_impl_common & other)962 void swap(private_adaptive_node_pool_impl_common &other)
963 {
964 std::swap(mp_segment_mngr_base, other.mp_segment_mngr_base);
965 std::swap(m_totally_free_blocks, other.m_totally_free_blocks);
966 m_block_container.swap(other.m_block_container);
967 }
968
   //!Returns a raw pointer to the segment manager used to allocate
   //!the blocks. Never throws
   segment_manager_base_type* get_segment_manager_base()const
   {  return boost::movelib::to_raw_pointer(mp_segment_mngr_base);  }
972 };
973
//Compile-time computation of the block geometry. This primary template is
//selected when AlignOnly is true (see the "false" specialization below):
//a block is one aligned chunk holding a header plus the nodes.
template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent
        , bool AlignOnly>
struct calculate_alignment_ct
{
   //Block alignment (== block size): smallest power of two that can hold
   //the header plus NodesPerBlock nodes
   static const std::size_t alignment = upper_power_of_2_ct<SizeType, HdrSize + RealNodeSize*NodesPerBlock>::value;
   //No subblocks in align-only mode
   static const std::size_t num_subblocks = 0;
   //Nodes that really fit once the header and payload are subtracted
   static const std::size_t real_num_node = (alignment - PayloadPerAllocation - HdrSize)/RealNodeSize;
};
988
//Specialization for AlignOnly == false: the block is divided into
//"num_subblocks" subblocks of "alignment" bytes each. The geometry is
//chosen by candidate_power_of_2_ct, which presumably takes
//OverheadPercent into account — see that template for the exact policy.
template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent>
struct calculate_alignment_ct
   < SizeType
   , HdrSize
   , PayloadPerAllocation
   , RealNodeSize
   , NodesPerBlock
   , HdrOffsetSize
   , OverheadPercent
   , false>
{
   //The candidate search starts at the smallest power of two able to
   //hold header + payload + one node
   typedef typename candidate_power_of_2_ct
      < upper_power_of_2_ct<SizeType, HdrSize + PayloadPerAllocation + RealNodeSize>::value
      , RealNodeSize
      , PayloadPerAllocation
      , NodesPerBlock
      , HdrSize
      , HdrOffsetSize
      , OverheadPercent
      >::type type;

   static const std::size_t alignment     = type::alignment;
   static const std::size_t num_subblocks = type::num_subblocks;
   static const std::size_t real_num_node = type::real_num_node;
};
1020
1021
1022 /////////////////////////////////////////////
1023 //
1024 // private_adaptive_node_pool_impl_ct
1025 //
1026 /////////////////////////////////////////////
1027 template< class SegmentManagerBase
1028 , std::size_t MaxFreeBlocks
1029 , std::size_t NodeSize
1030 , std::size_t NodesPerBlock
1031 , std::size_t OverheadPercent
1032 , unsigned int Flags>
1033 class private_adaptive_node_pool_impl_ct
1034 : public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
1035 {
1036 typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> base_t;
1037
1038 //Non-copyable
1039 private_adaptive_node_pool_impl_ct();
1040 private_adaptive_node_pool_impl_ct(const private_adaptive_node_pool_impl_ct &);
1041 private_adaptive_node_pool_impl_ct &operator=(const private_adaptive_node_pool_impl_ct &);
1042
1043 public:
1044 typedef typename base_t::void_pointer void_pointer;
1045 typedef typename base_t::size_type size_type;
1046 typedef typename base_t::multiallocation_chain multiallocation_chain;
1047 typedef typename base_t::segment_manager_base_type segment_manager_base_type;
1048
1049 static const typename base_t::size_type PayloadPerAllocation = base_t::PayloadPerAllocation;
1050
1051 //align_only
1052 static const bool AlignOnly = base_t::AlignOnly;
1053
1054 private:
1055 static const size_type MaxAlign = base_t::MaxAlign;
1056 static const size_type HdrSize = base_t::HdrSize;
1057 static const size_type HdrOffsetSize = base_t::HdrOffsetSize;
1058
1059 static const size_type RealNodeSize = lcm_ct<NodeSize, alignment_of<void_pointer>::value>::value;
1060
1061 typedef calculate_alignment_ct
1062 < size_type, HdrSize, PayloadPerAllocation
1063 , RealNodeSize, NodesPerBlock, HdrOffsetSize, OverheadPercent, AlignOnly> data_t;
1064
1065 //Round the size to a power of two value.
1066 //This is the total memory size (including payload) that we want to
1067 //allocate from the general-purpose allocator
1068 static const size_type NumSubBlocks = data_t::num_subblocks;
1069 static const size_type RealNumNode = data_t::real_num_node;
1070 static const size_type RealBlockAlignment = data_t::alignment;
1071
1072 public:
1073
1074 //!Constructor from a segment manager. Never throws
private_adaptive_node_pool_impl_ct(typename base_t::segment_manager_base_type * segment_mngr_base)1075 private_adaptive_node_pool_impl_ct(typename base_t::segment_manager_base_type *segment_mngr_base)
1076 //General purpose allocator
1077 : base_t(segment_mngr_base)
1078 {}
1079
1080 //!Destructor. Deallocates all allocated blocks. Never throws
~private_adaptive_node_pool_impl_ct()1081 ~private_adaptive_node_pool_impl_ct()
1082 { this->priv_clear(NumSubBlocks, data_t::alignment, RealNumNode); }
1083
get_real_num_node() const1084 size_type get_real_num_node() const
1085 { return RealNumNode; }
1086
1087 //!Allocates array of count elements. Can throw
allocate_node()1088 void *allocate_node()
1089 {
1090 return this->priv_allocate_node
1091 (MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
1092 }
1093
1094 //!Allocates n nodes.
1095 //!Can throw
allocate_nodes(const size_type n,multiallocation_chain & chain)1096 void allocate_nodes(const size_type n, multiallocation_chain &chain)
1097 {
1098 this->priv_allocate_nodes
1099 (n, chain, MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
1100 }
1101
1102 //!Deallocates an array pointed by ptr. Never throws
deallocate_node(void * pElem)1103 void deallocate_node(void *pElem)
1104 {
1105 this->priv_deallocate_node(pElem, MaxFreeBlocks, RealNumNode, NumSubBlocks, RealBlockAlignment);
1106 }
1107
1108 //!Deallocates a linked list of nodes. Never throws
deallocate_nodes(multiallocation_chain & nodes)1109 void deallocate_nodes(multiallocation_chain &nodes)
1110 {
1111 this->priv_deallocate_nodes(nodes, MaxFreeBlocks, RealNumNode, NumSubBlocks, data_t::alignment);
1112 }
1113
deallocate_free_blocks()1114 void deallocate_free_blocks()
1115 { this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment); }
1116
1117 //Deprecated, use deallocate_free_blocks
deallocate_free_chunks()1118 void deallocate_free_chunks()
1119 { this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment); }
1120 };
1121
1122 /////////////////////////////////////////////
1123 //
1124 // private_adaptive_node_pool_impl_rt
1125 //
1126 /////////////////////////////////////////////
//Runtime counterpart of calculate_alignment_ct: holds the pool geometry,
//which is computed in the constructor of private_adaptive_node_pool_impl_rt.
template<class SizeType>
struct private_adaptive_node_pool_impl_rt_data
{
   typedef SizeType size_type;

   //Geometry members (m_real_block_alignment, m_num_subblocks,
   //m_real_num_node) are value-initialized here and filled in later
   //by the owning pool's constructor.
   private_adaptive_node_pool_impl_rt_data(size_type max_free_blocks, size_type real_node_size)
      : m_max_free_blocks(max_free_blocks), m_real_node_size(real_node_size)
      , m_real_block_alignment(), m_num_subblocks(), m_real_num_node()
   {}

   //Maximum number of totally free blocks kept cached by the pool
   const size_type m_max_free_blocks;
   //Node size, already adjusted by the owner (see its constructor)
   const size_type m_real_node_size;
   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   size_type m_real_block_alignment;
   size_type m_num_subblocks;
   //This is the real number of nodes per block
   size_type m_real_num_node;
};
1147
1148
//Adaptive node pool whose geometry (node size, nodes per block, max free
//blocks, overhead percent) is chosen at runtime via constructor arguments.
//data_t is inherited privately (instead of being a member) and must be
//initialized before impl_t — see the constructor initializer list.
template<class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_rt
   : private private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type>
   , public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
{
   typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> impl_t;
   typedef private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type> data_t;

   //Non-copyable
   private_adaptive_node_pool_impl_rt();
   private_adaptive_node_pool_impl_rt(const private_adaptive_node_pool_impl_rt &);
   private_adaptive_node_pool_impl_rt &operator=(const private_adaptive_node_pool_impl_rt &);

   protected:

   typedef typename impl_t::void_pointer          void_pointer;
   typedef typename impl_t::size_type             size_type;
   typedef typename impl_t::multiallocation_chain multiallocation_chain;

   static const typename impl_t::size_type PayloadPerAllocation = impl_t::PayloadPerAllocation;

   //Flags
   //align_only
   static const bool AlignOnly = impl_t::AlignOnly;

   static const size_type HdrSize       = impl_t::HdrSize;
   static const size_type HdrOffsetSize = impl_t::HdrOffsetSize;

   public:

   //!Segment manager typedef
   typedef SegmentManagerBase segment_manager_base_type;

   //!Constructor from a segment manager. Never throws
   private_adaptive_node_pool_impl_rt
      ( segment_manager_base_type *segment_mngr_base
      , size_type node_size
      , size_type nodes_per_block
      , size_type max_free_blocks
      , unsigned char overhead_percent
      )
      //Round node_size up so that a node can always hold a void_pointer
      : data_t(max_free_blocks, lcm(node_size, size_type(alignment_of<void_pointer>::value)))
      , impl_t(segment_mngr_base)
   {
      if(AlignOnly){
         //One aligned chunk per block: its size is the smallest power of
         //two holding the header plus all the nodes
         this->m_real_block_alignment = upper_power_of_2(HdrSize + this->m_real_node_size*nodes_per_block);
         this->m_real_num_node = (this->m_real_block_alignment - PayloadPerAllocation - HdrSize)/this->m_real_node_size;
      }
      else{
         //Runtime geometry search: the last three arguments are output
         //parameters filled by candidate_power_of_2_rt
         candidate_power_of_2_rt ( upper_power_of_2(HdrSize + PayloadPerAllocation + this->m_real_node_size)
                                 , this->m_real_node_size
                                 , PayloadPerAllocation
                                 , nodes_per_block
                                 , HdrSize
                                 , HdrOffsetSize
                                 , overhead_percent
                                 , this->m_real_block_alignment
                                 , this->m_num_subblocks
                                 , this->m_real_num_node);
      }
   }

   //!Destructor. Deallocates all allocated blocks. Never throws
   ~private_adaptive_node_pool_impl_rt()
   {  this->priv_clear(this->m_num_subblocks, this->m_real_block_alignment, this->m_real_num_node);  }

   //!Returns the real number of nodes allocated per block
   size_type get_real_num_node() const
   {  return this->m_real_num_node;  }

   //!Allocates a single node. Can throw
   void *allocate_node()
   {
      return this->priv_allocate_node
         (this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Allocates n nodes pushing them to "chain".
   //!Can throw
   void allocate_nodes(const size_type n, multiallocation_chain &chain)
   {

      this->priv_allocate_nodes
         (n, chain, this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Deallocates a single node pointed by pElem. Never throws
   void deallocate_node(void *pElem)
   {
      this->priv_deallocate_node(pElem, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Deallocates a linked list of nodes. Never throws
   void deallocate_nodes(multiallocation_chain &nodes)
   {
      this->priv_deallocate_nodes(nodes, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Deallocates cached totally free blocks (max_free_blocks = 0). Never throws
   void deallocate_free_blocks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }

   //Deprecated, use deallocate_free_blocks
   void deallocate_free_chunks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }
};
1253
1254 } //namespace dtl {
1255 } //namespace container {
1256 } //namespace boost {
1257
1258 #include <boost/container/detail/config_end.hpp>
1259
1260 #endif //#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
1261