//  lock-free freelist
//
//  Copyright (C) 2008-2016 Tim Blechmann
//
//  Distributed under the Boost Software License, Version 1.0. (See
//  accompanying file LICENSE_1_0.txt or copy at
//  http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED

#include <limits>
#include <memory>
#include <stdexcept>                 // std::runtime_error, used by runtime_sized_freelist_storage

#include <boost/array.hpp>
#include <boost/config.hpp>
#include <boost/cstdint.hpp>
#include <boost/noncopyable.hpp>
#include <boost/static_assert.hpp>
#include <boost/throw_exception.hpp> // boost::throw_exception, used below

#include <boost/align/align_up.hpp>
#include <boost/align/aligned_allocator_adaptor.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>

#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable: 4100) // unreferenced formal parameter
#pragma warning(disable: 4127) // conditional expression is constant
#endif

namespace boost    {
namespace lockfree {
namespace detail   {

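// freelist_stack: a lock-free pool of nodes used by the node-based data
// structures. Freed nodes are kept on an intrusive LIFO list of tagged
// pointers; the tag is advanced on every pop so that a node recycled between
// a thread's load and its compare_exchange no longer compares equal (the
// classic ABA problem). When the list is empty and the freelist is not
// Bounded, new nodes are obtained from the allocator.
//
// A minimal usage sketch (illustrative only -- this is an internal detail
// class, normally reached through the boost::lockfree containers):
//
//   freelist_stack<int> pool(std::allocator<int>(), 16); // pre-fill 16 nodes
//   int * p = pool.construct<true, false>(42);           // thread-safe, unbounded
//   if (p)
//       pool.destruct<true>(p);                          // return node to the pool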
template <typename T,
          typename Alloc = std::allocator<T>
         >
class freelist_stack:
    Alloc
{
    struct freelist_node
    {
        tagged_ptr<freelist_node> next;
    };

    typedef tagged_ptr<freelist_node> tagged_node_ptr;

public:
    typedef T *           index_t;
    typedef tagged_ptr<T> tagged_node_handle;

    template <typename Allocator>
    freelist_stack (Allocator const & alloc, std::size_t n = 0):
        Alloc(alloc),
        pool_(tagged_node_ptr(NULL))
    {
        for (std::size_t i = 0; i != n; ++i) {
            T * node = Alloc::allocate(1);
#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(node);
#else
            deallocate<false>(node);
#endif
        }
    }

    template <bool ThreadSafe>
    void reserve (std::size_t count)
    {
        for (std::size_t i = 0; i != count; ++i) {
            T * node = Alloc::allocate(1);
            deallocate<ThreadSafe>(node);
        }
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle const & tagged_ptr)
    {
        T * n = tagged_ptr.get_ptr();
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    ~freelist_stack(void)
    {
        tagged_node_ptr current = pool_.load();

        while (current) {
            freelist_node * current_ptr = current.get_ptr();
            if (current_ptr)
                current = current_ptr->next;
            Alloc::deallocate((T*)current_ptr, 1);
        }
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    T * get_handle(T * pointer) const
    {
        return pointer;
    }

    T * get_handle(tagged_node_handle const & handle) const
    {
        return get_pointer(handle);
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return tptr.get_ptr();
    }

    T * get_pointer(T * pointer) const
    {
        return pointer;
    }

    T * null_handle(void) const
    {
        return NULL;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe, bool Bounded>
    T * allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl<Bounded>();
        else
            return allocate_impl_unsafe<Bounded>();
    }

private:
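    // Lock-free pop. The loop retries until the head of the free list can be
    // replaced with compare_exchange_weak; the replacement carries
    // get_next_tag(), so a node that was popped and pushed back by another
    // thread in the meantime is detected rather than silently reused with a
    // stale next pointer (ABA). An empty list either falls back to the
    // allocator (unbounded) or signals exhaustion by returning 0 (Bounded).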
    template <bool Bounded>
    T * allocate_impl (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);

        for(;;) {
            if (!old_pool.get_ptr()) {
                if (!Bounded)
                    return Alloc::allocate(1);
                else
                    return 0;
            }

            freelist_node * new_pool_ptr = old_pool->next.get_ptr();
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool)) {
                void * ptr = old_pool.get_ptr();
                return reinterpret_cast<T*>(ptr);
            }
        }
    }

    template <bool Bounded>
    T * allocate_impl_unsafe (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);

        if (!old_pool.get_ptr()) {
            if (!Bounded)
                return Alloc::allocate(1);
            else
                return 0;
        }

        freelist_node * new_pool_ptr = old_pool->next.get_ptr();
        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        void * ptr = old_pool.get_ptr();
        return reinterpret_cast<T*>(ptr);
    }

protected:
    template <bool ThreadSafe>
    void deallocate (T * n)
    {
        if (ThreadSafe)
            deallocate_impl(n);
        else
            deallocate_impl_unsafe(n);
    }

private:
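    // Lock-free push: link the returned node in front of the current head and
    // publish it with compare_exchange_weak. The tag is carried over unchanged
    // here; it is only advanced on the popping side.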
    void deallocate_impl (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        for(;;) {
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
            new_pool->next.set_ptr(old_pool.get_ptr());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
        new_pool->next.set_ptr(old_pool.get_ptr());

        pool_.store(new_pool, memory_order_relaxed);
    }

    atomic<tagged_node_ptr> pool_;
};

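// tagged_index packs a 16-bit node index and a 16-bit ABA tag into 32 bits,
// so atomic<tagged_index> can typically be lock-free without a double-width
// compare-and-swap. The tag wraps around via get_next_tag(); the fixed-size
// freelist below translates indices back to addresses as nodes() + index.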
class
BOOST_ALIGNMENT( 4 ) // workaround for bugs in MSVC
tagged_index
{
public:
    typedef boost::uint16_t tag_t;
    typedef boost::uint16_t index_t;

    /** uninitialized constructor */
    tagged_index(void) BOOST_NOEXCEPT //: index(0), tag(0)
    {}

    /** copy constructor */
#ifdef BOOST_NO_CXX11_DEFAULTED_FUNCTIONS
    tagged_index(tagged_index const & rhs):
        index(rhs.index), tag(rhs.tag)
    {}
#else
    tagged_index(tagged_index const & rhs) = default;
#endif

    explicit tagged_index(index_t i, tag_t t = 0):
        index(i), tag(t)
    {}

    /** index access */
    /* @{ */
    index_t get_index() const
    {
        return index;
    }

    void set_index(index_t i)
    {
        index = i;
    }
    /* @} */

    /** tag access */
    /* @{ */
    tag_t get_tag() const
    {
        return tag;
    }

    tag_t get_next_tag() const
    {
        tag_t next = (get_tag() + 1u) & (std::numeric_limits<tag_t>::max)();
        return next;
    }

    void set_tag(tag_t t)
    {
        tag = t;
    }
    /* @} */

    bool operator==(tagged_index const & rhs) const
    {
        return (index == rhs.index) && (tag == rhs.tag);
    }

    bool operator!=(tagged_index const & rhs) const
    {
        return !operator==(rhs);
    }

protected:
    index_t index;
    tag_t tag;
};

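// Storage policies for fixed_size_freelist. A policy only has to expose
// nodes(), the base address of the node array, and node_count().
// compiletime_sized_freelist_storage embeds the array directly in the object;
// the extra 64 bytes of slack let nodes() align the first node up to a
// cache-line boundary.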
template <typename T,
          std::size_t size>
struct compiletime_sized_freelist_storage
{
    // array-based freelists only support a 16bit address space.
    BOOST_STATIC_ASSERT(size < 65536);

    boost::array<char, size * sizeof(T) + 64> data;

    // unused ... only for API purposes
    template <typename Allocator>
    compiletime_sized_freelist_storage(Allocator const & /* alloc */, std::size_t /* count */)
    {}

    T * nodes(void) const
    {
        char * data_pointer = const_cast<char*>(data.data());
        return reinterpret_cast<T*>( boost::alignment::align_up( data_pointer, BOOST_LOCKFREE_CACHELINE_BYTES ) );
    }

    std::size_t node_count(void) const
    {
        return size;
    }
};

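// runtime_sized_freelist_storage allocates the node array once, at
// construction time, through a cache-line-aligned adaptor around the user's
// allocator. The count is capped at 65535 because nodes are addressed by
// 16-bit indices and node_count() itself serves as the null handle.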
template <typename T,
          typename Alloc = std::allocator<T> >
struct runtime_sized_freelist_storage:
    boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES >
{
    typedef boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES > allocator_type;
    T * nodes_;
    std::size_t node_count_;

    template <typename Allocator>
    runtime_sized_freelist_storage(Allocator const & alloc, std::size_t count):
        allocator_type(alloc), node_count_(count)
    {
        if (count > 65535)
            boost::throw_exception(std::runtime_error("boost.lockfree: freelist size is limited to a maximum of 65535 objects"));
        nodes_ = allocator_type::allocate(count);
    }

    ~runtime_sized_freelist_storage(void)
    {
        allocator_type::deallocate(nodes_, node_count_);
    }

    T * nodes(void) const
    {
        return nodes_;
    }

    std::size_t node_count(void) const
    {
        return node_count_;
    }
};


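// fixed_size_freelist: the array-based counterpart of freelist_stack. Nodes
// live in a preallocated array and are identified by 16-bit indices rather
// than pointers; a free node stores the index of the next free node in its
// own storage, and node_count() doubles as the null handle. The head of the
// list is a 32-bit tagged_index, so the hot path needs only a single-word
// compare_exchange.
//
// Illustrative sketch (again an internal detail, normally used through the
// fixed-sized / bounded container variants):
//
//   fixed_size_freelist<int> pool(std::allocator<int>(), 128);
//   int * p = pool.construct<true, true>(7); // NULL once all 128 nodes are in use
//   if (p)
//       pool.destruct<true>(p);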
template <typename T,
          typename NodeStorage = runtime_sized_freelist_storage<T>
         >
class fixed_size_freelist:
    NodeStorage
{
    struct freelist_node
    {
        tagged_index next;
    };

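    // Seed the free list: every slot of the storage array is pushed onto the
    // list, so all node_count() nodes are available for allocation from the
    // start. The pool never grows beyond this array.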
    void initialize(void)
    {
        T * nodes = NodeStorage::nodes();
        for (std::size_t i = 0; i != NodeStorage::node_count(); ++i) {
            tagged_index * next_index = reinterpret_cast<tagged_index*>(nodes + i);
            next_index->set_index(null_handle());

#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(nodes + i);
#else
            deallocate<false>(static_cast<index_t>(i));
#endif
        }
    }

public:
    typedef tagged_index tagged_node_handle;
    typedef tagged_index::index_t index_t;

    template <typename Allocator>
    fixed_size_freelist (Allocator const & alloc, std::size_t count):
        NodeStorage(alloc, count),
        pool_(tagged_index(static_cast<index_t>(count), 0))
    {
        initialize();
    }

    fixed_size_freelist (void):
        pool_(tagged_index(NodeStorage::node_count(), 0))
    {
        initialize();
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_index)
    {
        index_t index = tagged_index.get_index();
        T * n = NodeStorage::nodes() + index;
        (void)n; // silence msvc warning
        n->~T();
        deallocate<ThreadSafe>(index);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(static_cast<index_t>(n - NodeStorage::nodes()));
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    index_t null_handle(void) const
    {
        return static_cast<index_t>(NodeStorage::node_count());
    }

    index_t get_handle(T * pointer) const
    {
        if (pointer == NULL)
            return null_handle();
        else
            return static_cast<index_t>(pointer - NodeStorage::nodes());
    }

    index_t get_handle(tagged_node_handle const & handle) const
    {
        return handle.get_index();
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return get_pointer(tptr.get_index());
    }

    T * get_pointer(index_t index) const
    {
        if (index == null_handle())
            return 0;
        else
            return NodeStorage::nodes() + index;
    }

    T * get_pointer(T * ptr) const
    {
        return ptr;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe>
    index_t allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl();
        else
            return allocate_impl_unsafe();
    }

private:
    index_t allocate_impl (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            index_t index = old_pool.get_index();
            if (index == null_handle())
                return index;

            T * old_node = NodeStorage::nodes() + index;
            tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

            tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return old_pool.get_index();
        }
    }

    index_t allocate_impl_unsafe (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        index_t index = old_pool.get_index();
        if (index == null_handle())
            return index;

        T * old_node = NodeStorage::nodes() + index;
        tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

        tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        return old_pool.get_index();
    }

    template <bool ThreadSafe>
    void deallocate (index_t index)
    {
        if (ThreadSafe)
            deallocate_impl(index);
        else
            deallocate_impl_unsafe(index);
    }

    void deallocate_impl (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            tagged_index new_pool (index, old_pool.get_tag());
            new_pool_node->next.set_index(old_pool.get_index());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        tagged_index new_pool (index, old_pool.get_tag());
        new_pool_node->next.set_index(old_pool.get_index());

        pool_.store(new_pool);
    }

    atomic<tagged_index> pool_;
};

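// Compile-time selection of the freelist implementation: an array-based
// fixed_size_freelist when the capacity is a compile-time constant or the
// data structure is declared fixed-sized, otherwise the node-allocating
// freelist_stack.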
template <typename T,
          typename Alloc,
          bool IsCompileTimeSized,
          bool IsFixedSize,
          std::size_t Capacity
          >
struct select_freelist
{
    typedef typename mpl::if_c<IsCompileTimeSized,
                               compiletime_sized_freelist_storage<T, Capacity>,
                               runtime_sized_freelist_storage<T, Alloc>
                              >::type fixed_sized_storage_type;

    typedef typename mpl::if_c<IsCompileTimeSized || IsFixedSize,
                               fixed_size_freelist<T, fixed_sized_storage_type>,
                               freelist_stack<T, Alloc>
                              >::type type;
};

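// Handles follow the same split: node-based freelists hand out raw pointers,
// array-based ones hand out 16-bit indices.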
template <typename T, bool IsNodeBased>
struct select_tagged_handle
{
    typedef typename mpl::if_c<IsNodeBased,
                               tagged_ptr<T>,
                               tagged_index
                              >::type tagged_handle_type;

    typedef typename mpl::if_c<IsNodeBased,
                               T*,
                               typename tagged_index::index_t
                              >::type handle_type;
};


} /* namespace detail */
} /* namespace lockfree */
} /* namespace boost */

#if defined(_MSC_VER)
#pragma warning(pop)
#endif


#endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */