// lock-free freelist
//
// Copyright (C) 2008-2016 Tim Blechmann
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED

#include <cstring>
#include <limits>
#include <memory>
#include <stdexcept> // std::runtime_error, thrown below on oversized freelists

#include <boost/array.hpp>
#include <boost/config.hpp>
#include <boost/cstdint.hpp>
#include <boost/mpl/if.hpp> // mpl::if_c, used by the select_* helpers
#include <boost/noncopyable.hpp>
#include <boost/static_assert.hpp>
#include <boost/throw_exception.hpp> // boost::throw_exception

#include <boost/align/align_up.hpp>
#include <boost/align/aligned_allocator_adaptor.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>

#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable: 4100) // unreferenced formal parameter
#pragma warning(disable: 4127) // conditional expression is constant
#endif

namespace boost {
namespace lockfree {
namespace detail {
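
// freelist_stack: a lock-free stack of free nodes used as a memory pool.
// Free nodes are linked through a tagged pointer whose tag is advanced on
// every pop, so a stale head left over from an interleaved pop/push/pop of
// the same node (the classic ABA problem) fails the compare_exchange below.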
template <typename T,
          typename Alloc = std::allocator<T>
         >
class freelist_stack:
    Alloc
{
    struct freelist_node
    {
        tagged_ptr<freelist_node> next;
    };

    typedef tagged_ptr<freelist_node> tagged_node_ptr;

public:
    typedef T * index_t;
    typedef tagged_ptr<T> tagged_node_handle;

    template <typename Allocator>
    freelist_stack (Allocator const & alloc, std::size_t n = 0):
        Alloc(alloc),
        pool_(tagged_node_ptr(NULL))
    {
        for (std::size_t i = 0; i != n; ++i) {
            T * node = Alloc::allocate(1);
            std::memset(node, 0, sizeof(T));
#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(node);
#else
            deallocate<false>(node);
#endif
        }
    }
    template <bool ThreadSafe>
    void reserve (std::size_t count)
    {
        for (std::size_t i = 0; i != count; ++i) {
            T * node = Alloc::allocate(1);
            std::memset(node, 0, sizeof(T));
            deallocate<ThreadSafe>(node);
        }
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle const & tagged_ptr)
    {
        T * n = tagged_ptr.get_ptr();
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    ~freelist_stack(void)
    {
        tagged_node_ptr current = pool_.load();

        while (current) {
            freelist_node * current_ptr = current.get_ptr();
            if (current_ptr)
                current = current_ptr->next;
            Alloc::deallocate((T*)current_ptr, 1);
        }
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    T * get_handle(T * pointer) const
    {
        return pointer;
    }

    T * get_handle(tagged_node_handle const & handle) const
    {
        return get_pointer(handle);
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return tptr.get_ptr();
    }

    T * get_pointer(T * pointer) const
    {
        return pointer;
    }

    T * null_handle(void) const
    {
        return NULL;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe, bool Bounded>
    T * allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl<Bounded>();
        else
            return allocate_impl_unsafe<Bounded>();
    }

private:
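    // Pop one node with a CAS loop. The head's tag is advanced on every
    // successful pop, so a concurrently recycled node cannot masquerade as
    // the old head (ABA prevention). On an empty freelist this falls back
    // to the allocator, unless the freelist is Bounded.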
    template <bool Bounded>
    T * allocate_impl (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);

        for(;;) {
            if (!old_pool.get_ptr()) {
                if (!Bounded) {
                    T *ptr = Alloc::allocate(1);
                    std::memset(ptr, 0, sizeof(T));
                    return ptr;
                }
                else
                    return 0;
            }

            freelist_node * new_pool_ptr = old_pool->next.get_ptr();
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool)) {
                void * ptr = old_pool.get_ptr();
                return reinterpret_cast<T*>(ptr);
            }
        }
    }

    template <bool Bounded>
    T * allocate_impl_unsafe (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);

        if (!old_pool.get_ptr()) {
            if (!Bounded) {
                T *ptr = Alloc::allocate(1);
                std::memset(ptr, 0, sizeof(T));
                return ptr;
            }
            else
                return 0;
        }

        freelist_node * new_pool_ptr = old_pool->next.get_ptr();
        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        void * ptr = old_pool.get_ptr();
        return reinterpret_cast<T*>(ptr);
    }

protected:
    template <bool ThreadSafe>
    void deallocate (T * n)
    {
        if (ThreadSafe)
            deallocate_impl(n);
        else
            deallocate_impl_unsafe(n);
    }

private:
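    // Push a node back with a CAS loop. The freed node's next pointer is
    // rewritten on every retry to point at the current head; the tag is
    // carried over unchanged, since only pops need to advance it.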
    void deallocate_impl (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        for(;;) {
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
            new_pool->next.set_ptr(old_pool.get_ptr());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
        new_pool->next.set_ptr(old_pool.get_ptr());

        pool_.store(new_pool, memory_order_relaxed);
    }

    atomic<tagged_node_ptr> pool_;
};
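
// A minimal usage sketch (illustrative only -- inside boost::lockfree the
// freelist is driven by the queue/stack containers, not used directly):
//
//     freelist_stack<int> pool(std::allocator<int>(), 16); // preallocate 16 nodes
//     int * p = pool.construct<true, false>(42);           // thread-safe, unbounded
//     if (p)
//         pool.destruct<true>(p);                          // run dtor, return node to pool
//
// tagged_index packs a 16-bit array index and a 16-bit ABA tag into 32 bits,
// so atomic<tagged_index> can be lock-free wherever a 32-bit CAS exists.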
class
BOOST_ALIGNMENT( 4 ) // workaround for bugs in MSVC
tagged_index
{
public:
    typedef boost::uint16_t tag_t;
    typedef boost::uint16_t index_t;

    /** uninitialized constructor */
    tagged_index(void) BOOST_NOEXCEPT //: index(0), tag(0)
    {}

    /** copy constructor */
#ifdef BOOST_NO_CXX11_DEFAULTED_FUNCTIONS
    tagged_index(tagged_index const & rhs):
        index(rhs.index), tag(rhs.tag)
    {}
#else
    tagged_index(tagged_index const & rhs) = default;
#endif

    explicit tagged_index(index_t i, tag_t t = 0):
        index(i), tag(t)
    {}

    /** index access */
    /* @{ */
    index_t get_index() const
    {
        return index;
    }

    void set_index(index_t i)
    {
        index = i;
    }
    /* @} */

    /** tag access */
    /* @{ */
    tag_t get_tag() const
    {
        return tag;
    }

    tag_t get_next_tag() const
    {
        tag_t next = (get_tag() + 1u) & (std::numeric_limits<tag_t>::max)();
        return next;
    }

    void set_tag(tag_t t)
    {
        tag = t;
    }
    /* @} */

    bool operator==(tagged_index const & rhs) const
    {
        return (index == rhs.index) && (tag == rhs.tag);
    }

    bool operator!=(tagged_index const & rhs) const
    {
        return !operator==(rhs);
    }

protected:
    index_t index;
    tag_t tag;
};
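
// Compile-time-sized node storage: a raw char buffer with 64 bytes of slack
// so that nodes() can realign the array to a cache-line boundary at runtime.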
template <typename T,
          std::size_t size>
struct compiletime_sized_freelist_storage
{
    // array-based freelists only support a 16bit address space.
    BOOST_STATIC_ASSERT(size < 65536);

    boost::array<char, size * sizeof(T) + 64> data;

    // unused ... only for API purposes
    template <typename Allocator>
    compiletime_sized_freelist_storage(Allocator const & /* alloc */, std::size_t /* count */)
    {
        data.fill(0);
    }

    T * nodes(void) const
    {
        char * data_pointer = const_cast<char*>(data.data());
        return reinterpret_cast<T*>( boost::alignment::align_up( data_pointer, BOOST_LOCKFREE_CACHELINE_BYTES ) );
    }

    std::size_t node_count(void) const
    {
        return size;
    }
};
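
// Runtime-sized node storage: allocates a cache-line-aligned node array via
// aligned_allocator_adaptor. Capacity is capped at 65535 so that every node
// remains addressable by tagged_index's 16-bit index field.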
template <typename T,
          typename Alloc = std::allocator<T> >
struct runtime_sized_freelist_storage:
    boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES >
{
    typedef boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES > allocator_type;
    T * nodes_;
    std::size_t node_count_;

    template <typename Allocator>
    runtime_sized_freelist_storage(Allocator const & alloc, std::size_t count):
        allocator_type(alloc), node_count_(count)
    {
        if (count > 65535)
            boost::throw_exception(std::runtime_error("boost.lockfree: freelist size is limited to a maximum of 65535 objects"));
        nodes_ = allocator_type::allocate(count);
        std::memset(nodes_, 0, sizeof(T) * count);
    }

    ~runtime_sized_freelist_storage(void)
    {
        allocator_type::deallocate(nodes_, node_count_);
    }

    T * nodes(void) const
    {
        return nodes_;
    }

    std::size_t node_count(void) const
    {
        return node_count_;
    }
};
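
// fixed_size_freelist: an index-based freelist over a contiguous node array.
// A free node stores the index of the next free node in its own memory, and
// node_count() doubles as the null handle -- the role NULL plays for
// freelist_stack.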
template <typename T,
          typename NodeStorage = runtime_sized_freelist_storage<T>
         >
class fixed_size_freelist:
    NodeStorage
{
    struct freelist_node
    {
        tagged_index next;
    };

    void initialize(void)
    {
        T * nodes = NodeStorage::nodes();
        for (std::size_t i = 0; i != NodeStorage::node_count(); ++i) {
            tagged_index * next_index = reinterpret_cast<tagged_index*>(nodes + i);
            next_index->set_index(null_handle());

#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(nodes + i);
#else
            deallocate<false>(static_cast<index_t>(i));
#endif
        }
    }

public:
    typedef tagged_index tagged_node_handle;
    typedef tagged_index::index_t index_t;

    template <typename Allocator>
    fixed_size_freelist (Allocator const & alloc, std::size_t count):
        NodeStorage(alloc, count),
        pool_(tagged_index(static_cast<index_t>(count), 0))
    {
        initialize();
    }

    fixed_size_freelist (void):
        pool_(tagged_index(static_cast<index_t>(NodeStorage::node_count()), 0))
    {
        initialize();
    }
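
    // Note: the Bounded template parameter below exists only for interface
    // compatibility with freelist_stack; a fixed-size freelist is always
    // bounded, so the parameter is ignored.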
    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_index)
    {
        index_t index = tagged_index.get_index();
        T * n = NodeStorage::nodes() + index;
        (void)n; // silence msvc warning
        n->~T();
        deallocate<ThreadSafe>(index);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(static_cast<index_t>(n - NodeStorage::nodes()));
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    index_t null_handle(void) const
    {
        return static_cast<index_t>(NodeStorage::node_count());
    }

    index_t get_handle(T * pointer) const
    {
        if (pointer == NULL)
            return null_handle();
        else
            return static_cast<index_t>(pointer - NodeStorage::nodes());
    }

    index_t get_handle(tagged_node_handle const & handle) const
    {
        return handle.get_index();
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return get_pointer(tptr.get_index());
    }

    T * get_pointer(index_t index) const
    {
        if (index == null_handle())
            return 0;
        else
            return NodeStorage::nodes() + index;
    }

    T * get_pointer(T * ptr) const
    {
        return ptr;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe>
    index_t allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl();
        else
            return allocate_impl_unsafe();
    }

private:
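    // Pop an index with a CAS loop, mirroring freelist_stack::allocate_impl:
    // the next-free index is read from the node's own memory and the tag is
    // advanced on every successful pop to guard against ABA.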
    index_t allocate_impl (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            index_t index = old_pool.get_index();
            if (index == null_handle())
                return index;

            T * old_node = NodeStorage::nodes() + index;
            tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);
            tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return old_pool.get_index();
        }
    }

    index_t allocate_impl_unsafe (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        index_t index = old_pool.get_index();
        if (index == null_handle())
            return index;

        T * old_node = NodeStorage::nodes() + index;
        tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);
        tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        return old_pool.get_index();
    }

    template <bool ThreadSafe>
    void deallocate (index_t index)
    {
        if (ThreadSafe)
            deallocate_impl(index);
        else
            deallocate_impl_unsafe(index);
    }
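
    // Push an index back with a CAS loop; the freed node's next field is
    // rewritten on each retry to point at the current head index.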
    void deallocate_impl (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            tagged_index new_pool (index, old_pool.get_tag());
            new_pool_node->next.set_index(old_pool.get_index());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        tagged_index new_pool (index, old_pool.get_tag());
        new_pool_node->next.set_index(old_pool.get_index());

        pool_.store(new_pool);
    }

    atomic<tagged_index> pool_;
};
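
// Compile-time selection of the freelist flavour: an array-backed
// fixed_size_freelist when the capacity is fixed (at compile time or at
// construction), otherwise the node-allocating freelist_stack.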
template <typename T,
          typename Alloc,
          bool IsCompileTimeSized,
          bool IsFixedSize,
          std::size_t Capacity
         >
struct select_freelist
{
    typedef typename mpl::if_c<IsCompileTimeSized,
                               compiletime_sized_freelist_storage<T, Capacity>,
                               runtime_sized_freelist_storage<T, Alloc>
                              >::type fixed_sized_storage_type;

    typedef typename mpl::if_c<IsCompileTimeSized || IsFixedSize,
                               fixed_size_freelist<T, fixed_sized_storage_type>,
                               freelist_stack<T, Alloc>
                              >::type type;
};

template <typename T, bool IsNodeBased>
struct select_tagged_handle
{
    typedef typename mpl::if_c<IsNodeBased,
                               tagged_ptr<T>,
                               tagged_index
                              >::type tagged_handle_type;

    typedef typename mpl::if_c<IsNodeBased,
                               T*,
                               typename tagged_index::index_t
                              >::type handle_type;
};
} /* namespace detail */
} /* namespace lockfree */
} /* namespace boost */

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

#endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */