auto_buffer.hpp 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142
  1. // Copyright Thorsten Ottosen, 2009.
  2. // Distributed under the Boost Software License, Version 1.0. (See
  3. // accompanying file LICENSE_1_0.txt or copy at
  4. // http://www.boost.org/LICENSE_1_0.txt)
  5. #ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  6. #define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  7. #include <boost/detail/workaround.hpp>
  8. #if defined(_MSC_VER)
  9. # pragma once
  10. #endif
  11. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  12. #pragma warning(push)
  13. #pragma warning(disable:4996)
  14. #endif
  15. #include <boost/assert.hpp>
  16. #include <boost/config.hpp>
  17. #include <boost/core/allocator_access.hpp>
  18. #include <boost/iterator/reverse_iterator.hpp>
  19. #include <boost/iterator/iterator_traits.hpp>
  20. #include <boost/mpl/if.hpp>
  21. #include <boost/signals2/detail/scope_guard.hpp>
  22. #include <boost/swap.hpp>
  23. #include <boost/type_traits/aligned_storage.hpp>
  24. #include <boost/type_traits/alignment_of.hpp>
  25. #include <boost/type_traits/has_nothrow_copy.hpp>
  26. #include <boost/type_traits/has_nothrow_assign.hpp>
  27. #include <boost/type_traits/has_trivial_assign.hpp>
  28. #include <boost/type_traits/has_trivial_constructor.hpp>
  29. #include <boost/type_traits/has_trivial_destructor.hpp>
  30. #include <algorithm>
  31. #include <cstring>
  32. #include <iterator>
  33. #include <memory>
  34. #include <stdexcept>
  35. namespace boost
  36. {
  37. namespace signals2
  38. {
  39. namespace detail
  40. {
  41. //
  42. // Policies for creating the stack buffer.
  43. //
// Stack-buffer sizing policy: reserve inline storage for exactly
// N objects of the element type T.
template< unsigned N >
struct store_n_objects
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
// Stack-buffer sizing policy: reserve N raw bytes of inline storage,
// regardless of the element type's size.
template< unsigned N >
struct store_n_bytes
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
namespace auto_buffer_detail
{
    // Metafunction: size in bytes of the stack buffer implied by a
    // sizing policy. The primary template handles object-count
    // policies (count * sizeof(T)).
    template< class Policy, class T >
    struct compute_buffer_size
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
    };

    // Byte-count policies already give the size directly.
    template< unsigned N, class T >
    struct compute_buffer_size< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };

    // Metafunction: number of T objects the stack buffer can hold.
    // The primary template handles object-count policies.
    template< class Policy, class T >
    struct compute_buffer_objects
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
    };

    // For byte-count policies, the object count is bytes / sizeof(T)
    // (integer division: any remainder bytes are unused).
    template< unsigned N, class T >
    struct compute_buffer_objects< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
    };
}
  77. struct default_grow_policy
  78. {
  79. template< class SizeType >
  80. static SizeType new_capacity( SizeType capacity )
  81. {
  82. //
  83. // @remark: we grow the capacity quite agressively.
  84. // this is justified since we aim to minimize
  85. // heap-allocations, and because we mostly use
  86. // the buffer locally.
  87. return capacity * 4u;
  88. }
  89. template< class SizeType >
  90. static bool should_shrink( SizeType, SizeType )
  91. {
  92. //
  93. // @remark: when defining a new grow policy, one might
  94. // choose that if the waated space is less
  95. // than a certain percentage, then it is of
  96. // little use to shrink.
  97. //
  98. return true;
  99. }
  100. };
// Forward declaration supplying the default policies: 256 objects of
// inline storage, x4 growth, and std::allocator<T>.
template< class T,
          class StackBufferPolicy = store_n_objects<256>,
          class GrowPolicy        = default_grow_policy,
          class Allocator         = std::allocator<T> >
class auto_buffer;
//
// A contiguous, vector-like sequence that keeps up to N elements
// (N derived from StackBufferPolicy) in an internal stack buffer and
// only uses the Allocator for larger capacities. Derives privately
// from Allocator so stateless allocators take no space (EBO).
//
// Exception-safety of each operation is annotated inline
// ("strong", "basic", "nothrow") where it is not the default.
//
template
<
    class T,
    class StackBufferPolicy,
    class GrowPolicy,
    class Allocator
>
class auto_buffer : Allocator
{
private:
    // Number of T objects the inline (stack) storage can hold.
    enum { N = auto_buffer_detail::
               compute_buffer_objects<StackBufferPolicy,T>::value };

    BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );

    // Heap-only flavour of this buffer (zero inline slots); relevant
    // for the local temporaries used in insert()/one_sided_swap().
    typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
        local_buffer;

public:
    typedef Allocator                                                  allocator_type;
    typedef T                                                          value_type;
    typedef typename boost::allocator_size_type<Allocator>::type       size_type;
    typedef typename boost::allocator_difference_type<Allocator>::type difference_type;
    typedef T*                                                         pointer;
    typedef typename boost::allocator_pointer<Allocator>::type         allocator_pointer;
    typedef const T*                                                   const_pointer;
    typedef T&                                                         reference;
    typedef const T&                                                   const_reference;
    typedef pointer                                                    iterator;
    typedef const_pointer                                              const_iterator;
    typedef boost::reverse_iterator<iterator>                          reverse_iterator;
    typedef boost::reverse_iterator<const_iterator>                    const_reverse_iterator;
    // Small trivially-assignable types are passed by value; everything
    // else by const reference.
    typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
                                       && sizeof(T) <= sizeof(long double),
                                       const value_type,
                                       const_reference >::type
        optimized_const_reference;

private:
    // Returns raw storage for 'capacity_arg' objects: the inline stack
    // buffer when it suffices, otherwise allocator memory.
    pointer allocate( size_type capacity_arg )
    {
        if( capacity_arg > N )
            return &*get_allocator().allocate( capacity_arg );
        else
            return static_cast<T*>( members_.address() );
    }

    // Frees 'where' only when it was heap-allocated; stack capacities
    // (<= N) are a no-op.
    void deallocate( pointer where, size_type capacity_arg )
    {
        if( capacity_arg <= N )
            return;
        get_allocator().deallocate( allocator_pointer(where), capacity_arg );
    }

    // Copy-construct [begin,end) into uninitialized storage at 'where';
    // random-access ranges may take the memcpy fast path below.
    template< class I >
    static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
    {
        copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
    }

    // Fast path: raw-pointer range over a trivially-assignable T.
    static void copy_rai( const T* begin, const T* end,
                          pointer where, const boost::true_type& )
    {
        std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
    }

    // Generic fallback (non-pointer iterators or non-trivial T).
    template< class I, bool b >
    static void copy_rai( I begin, I end,
                          pointer where, const boost::integral_constant<bool, b>& )
    {
        std::uninitialized_copy( begin, end, where );
    }

    template< class I >
    static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
    {
        std::uninitialized_copy( begin, end, where );
    }

    template< class I >
    static void copy_impl( I begin, I end, pointer where )
    {
        copy_impl( begin, end, where,
                   typename std::iterator_traits<I>::iterator_category() );
    }

    // Assign [begin,end) over *already constructed* elements at 'where'.
    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where )
    {
        assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
    }

    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
    {
        std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
    }

    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
    {
        for( ; begin != end; ++begin, ++where )
            *where = *begin;
    }

    // Append n value-initialized elements; capacity must already
    // suffice. NOTE(review): the dispatch below is on
    // has_trivial_assign although the fast path default-constructs;
    // has_trivial_constructor looks like the intended trait — confirm
    // against upstream before changing.
    void unchecked_push_back_n( size_type n, const boost::true_type& )
    {
        std::uninitialized_fill( end(), end() + n, T() );
        size_ += n;
    }

    void unchecked_push_back_n( size_type n, const boost::false_type& )
    {
        for( size_type i = 0u; i < n; ++i )
            unchecked_push_back();
    }

    // Destroy a single element (no-op for trivially destructible T).
    void auto_buffer_destroy( pointer where, const boost::false_type& )
    {
        (*where).~T();
    }

    void auto_buffer_destroy( pointer, const boost::true_type& )
    { }

    void auto_buffer_destroy( pointer where )
    {
        auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
    }

    // Destroy all elements and release the heap buffer, if any.
    void auto_buffer_destroy()
    {
        BOOST_ASSERT( is_valid() );
        if( buffer_ ) // do we need this check? Yes, but only
                      // for N = 0u + local instances in one_sided_swap()
            auto_buffer_destroy( boost::has_trivial_destructor<T>() );
    }

    // Destroy the last n elements, back to front.
    void destroy_back_n( size_type n, const boost::false_type& )
    {
        BOOST_ASSERT( n > 0 );
        pointer buffer  = buffer_ + size_ - 1u;
        pointer new_end = buffer - n;
        for( ; buffer > new_end; --buffer )
            auto_buffer_destroy( buffer );
    }

    void destroy_back_n( size_type, const boost::true_type& )
    { }

    void destroy_back_n( size_type n )
    {
        destroy_back_n( n, boost::has_trivial_destructor<T>() );
    }

    // Tear-down helpers used by auto_buffer_destroy(): destroy elements
    // (unless trivially destructible) and then free the buffer.
    void auto_buffer_destroy( const boost::false_type& x )
    {
        if( size_ )
            destroy_back_n( size_, x );
        deallocate( buffer_, members_.capacity_ );
    }

    void auto_buffer_destroy( const boost::true_type& )
    {
        deallocate( buffer_, members_.capacity_ );
    }

    // Allocate a bigger buffer and copy the elements across. The
    // false_type overload guards the new buffer so a throwing copy
    // cannot leak it (strong guarantee).
    pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
    {
        pointer new_buffer = allocate( new_capacity ); // strong
        scope_guard guard = make_obj_guard( *this,
                                            &auto_buffer::deallocate,
                                            new_buffer,
                                            new_capacity );
        copy_impl( begin(), end(), new_buffer ); // strong
        guard.dismiss();                         // nothrow
        return new_buffer;
    }

    // When T's copy cannot throw, no guard is needed.
    pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
    {
        pointer new_buffer = allocate( new_capacity ); // strong
        copy_impl( begin(), end(), new_buffer );       // nothrow
        return new_buffer;
    }

    // Reallocate to exactly new_capacity and adopt the new buffer.
    void reserve_impl( size_type new_capacity )
    {
        pointer new_buffer = move_to_new_buffer( new_capacity,
                                                 boost::has_nothrow_copy<T>() );
        auto_buffer_destroy();
        buffer_            = new_buffer;
        members_.capacity_ = new_capacity;
        BOOST_ASSERT( size_ <= members_.capacity_ );
    }

    // Next capacity: at least what GrowPolicy suggests, and at least n.
    size_type new_capacity_impl( size_type n )
    {
        BOOST_ASSERT( n > members_.capacity_ );
        size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
        // @todo: consider to check for allocator.max_size()
        return (std::max)(new_capacity,n);
    }

    // Swap two buffers that both live on the stack; trivially
    // assignable T allows a bulk copy via a temporary.
    static void swap_helper( auto_buffer& l, auto_buffer& r,
                             const boost::true_type& )
    {
        BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );

        auto_buffer temp( l.begin(), l.end() );
        assign_impl( r.begin(), r.end(), l.begin() );
        assign_impl( temp.begin(), temp.end(), r.begin() );
        boost::swap( l.size_, r.size_ );
        boost::swap( l.members_.capacity_, r.members_.capacity_ );
    }

    // Element-wise swap for non-trivially-assignable T: swap the common
    // prefix, move the surplus into the smaller buffer, then shrink the
    // larger one.
    static void swap_helper( auto_buffer& l, auto_buffer& r,
                             const boost::false_type& )
    {
        BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
        size_type min_size = (std::min)(l.size_,r.size_);
        size_type max_size = (std::max)(l.size_,r.size_);
        size_type diff     = max_size - min_size;
        auto_buffer* smallest = l.size_ == min_size ? &l : &r;
        auto_buffer* largest  = smallest == &l ? &r : &l;

        // @remark: the implementation below is not as fast
        //          as it could be if we assumed T had a default
        //          constructor.
        size_type i = 0u;
        for(  ; i < min_size; ++i )
            boost::swap( (*smallest)[i], (*largest)[i] );

        for( ; i < max_size; ++i )
            smallest->unchecked_push_back( (*largest)[i] );

        largest->pop_back_n( diff );
        boost::swap( l.members_.capacity_, r.members_.capacity_ );
    }

    // Steal temp's (heap) buffer, destroying our own contents first.
    // temp is left with a null buffer so its destructor does nothing.
    void one_sided_swap( auto_buffer& temp ) // nothrow
    {
        BOOST_ASSERT( !temp.is_on_stack() );
        auto_buffer_destroy();
        // @remark: must be nothrow
        get_allocator()    = temp.get_allocator();
        members_.capacity_ = temp.members_.capacity_;
        buffer_            = temp.buffer_;
        BOOST_ASSERT( temp.size_ >= size_ + 1u );
        size_              = temp.size_;
        temp.buffer_       = 0;
        BOOST_ASSERT( temp.is_valid() );
    }

    // Input iterators can only be traversed once, so insert one
    // element at a time.
    template< class I >
    void insert_impl( const_iterator before, I begin_arg, I end_arg,
                      std::input_iterator_tag )
    {
        for( ; begin_arg != end_arg; ++begin_arg )
        {
            before = insert( before, *begin_arg );
            ++before;
        }
    }

    // Extend the sequence by n elements; for trivially constructible T
    // just bump the size, otherwise default-construct each slot.
    void grow_back( size_type n, const boost::true_type& )
    {
        BOOST_ASSERT( size_ + n <= members_.capacity_ );
        size_ += n;
    }

    void grow_back( size_type n, const boost::false_type& )
    {
        unchecked_push_back_n(n);
    }

    void grow_back( size_type n )
    {
        grow_back( n, boost::has_trivial_constructor<T>() );
    }

    void grow_back_one( const boost::true_type& )
    {
        BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
        size_ += 1;
    }

    void grow_back_one( const boost::false_type& )
    {
        unchecked_push_back();
    }

    void grow_back_one()
    {
        grow_back_one( boost::has_trivial_constructor<T>() );
    }

    // Forward iterators allow pre-computing the count, enabling the
    // in-place shift or a single reallocation.
    template< class I >
    void insert_impl( const_iterator before, I begin_arg, I end_arg,
                      std::forward_iterator_tag )
    {
        difference_type n = std::distance(begin_arg, end_arg);

        if( size_ + n <= members_.capacity_ )
        {
            bool is_back_insertion = before == cend();
            if( !is_back_insertion )
            {
                grow_back( n );
                iterator where = const_cast<T*>(before);
                // NOTE(review): [before, cend()-n) is copied forward
                // into an overlapping destination at where+n;
                // std::copy requires a non-overlapping destination —
                // confirm against upstream whether copy_backward
                // semantics were intended.
                std::copy( before, cend() - n, where + n );
                assign_impl( begin_arg, end_arg, where );
            }
            else
            {
                unchecked_push_back( begin_arg, end_arg );
            }
            BOOST_ASSERT( is_valid() );
            return;
        }

        // Not enough room: build the result in a temporary and adopt
        // its buffer (nothrow) afterwards.
        auto_buffer temp( new_capacity_impl( size_ + n ) );
        temp.unchecked_push_back( cbegin(), before );
        temp.unchecked_push_back( begin_arg, end_arg );
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
    }

public:
    // Class invariant; asserted after every mutating operation.
    bool is_valid() const // invariant
    {
        // @remark: allowed for N==0 and when
        //          using a local instance
        //          in insert()/one_sided_swap()
        if( buffer_ == 0 )
            return true;

        if( members_.capacity_ < N )
            return false;

        if( !is_on_stack() && members_.capacity_ <= N )
            return false;

        if( buffer_ == members_.address() )
            if( members_.capacity_ > N )
                return false;

        if( size_ > members_.capacity_ )
            return false;

        return true;
    }

    // Empty buffer using the inline stack storage.
    auto_buffer()
        : members_( N ),
          buffer_( static_cast<T*>(members_.address()) ),
          size_( 0u )
    {
        BOOST_ASSERT( is_valid() );
    }

    auto_buffer( const auto_buffer& r )
        : members_( (std::max)(r.size_,size_type(N)) ),
          buffer_( allocate( members_.capacity_ ) ),
          size_( 0 )
    {
        copy_impl( r.begin(), r.end(), buffer_ );
        size_ = r.size_;
        BOOST_ASSERT( is_valid() );
    }

    // Copy assignment; gives only the basic guarantee (memory is
    // released before the new elements are copied in).
    auto_buffer& operator=( const auto_buffer& r ) // basic
    {
        if( this == &r )
            return *this;

        difference_type diff = size_ - r.size_;
        if( diff >= 0 )
        {
            // We shrink: drop the surplus, then overwrite the rest.
            pop_back_n( static_cast<size_type>(diff) );
            assign_impl( r.begin(), r.end(), begin() );
        }
        else
        {
            if( members_.capacity_ >= r.size() )
            {
                // We grow within the current capacity.
                unchecked_push_back_n( static_cast<size_type>(-diff) );
                assign_impl( r.begin(), r.end(), begin() );
            }
            else
            {
                // @remark: we release memory as early as possible
                //          since we only give the basic guarantee
                auto_buffer_destroy();
                buffer_ = 0;
                pointer new_buffer = allocate( r.size() );
                scope_guard guard = make_obj_guard( *this,
                                                    &auto_buffer::deallocate,
                                                    new_buffer,
                                                    r.size() );
                copy_impl( r.begin(), r.end(), new_buffer );
                guard.dismiss();
                buffer_            = new_buffer;
                members_.capacity_ = r.size();
                size_              = members_.capacity_;
            }
        }

        BOOST_ASSERT( size() == r.size() );
        BOOST_ASSERT( is_valid() );
        return *this;
    }

    // Empty buffer with at least 'capacity_arg' capacity.
    explicit auto_buffer( size_type capacity_arg )
        : members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        BOOST_ASSERT( is_valid() );
    }

    // 'size_arg' copies of 'init_value'.
    auto_buffer( size_type size_arg, optimized_const_reference init_value )
        : members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
        size_ = size_arg;
        BOOST_ASSERT( is_valid() );
    }

    auto_buffer( size_type capacity_arg, const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        BOOST_ASSERT( is_valid() );
    }

    auto_buffer( size_type size_arg, optimized_const_reference init_value,
                 const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
        size_ = size_arg;
        BOOST_ASSERT( is_valid() );
    }

    // Range constructors. The capacity member is temporarily the exact
    // range length for allocate(), then clamped up to N so the stack-
    // buffer invariant holds.
    template< class ForwardIterator >
    auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
        :
        members_( std::distance(begin_arg, end_arg) ),
        buffer_( allocate(members_.capacity_) ),
        size_( 0 )
    {
        copy_impl( begin_arg, end_arg, buffer_ );
        size_ = members_.capacity_;
        if( members_.capacity_ < N )
            members_.capacity_ = N;
        BOOST_ASSERT( is_valid() );
    }

    template< class ForwardIterator >
    auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
                 const allocator_type& a )
        : allocator_type( a ),
          members_( std::distance(begin_arg, end_arg) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        copy_impl( begin_arg, end_arg, buffer_ );
        size_ = members_.capacity_;
        if( members_.capacity_ < N )
            members_.capacity_ = N;
        BOOST_ASSERT( is_valid() );
    }

    ~auto_buffer()
    {
        auto_buffer_destroy();
    }

public:
    bool empty() const
    {
        return size_ == 0;
    }

    bool full() const
    {
        return size_ == members_.capacity_;
    }

    // True while the elements live in the inline storage.
    bool is_on_stack() const
    {
        return members_.capacity_ <= N;
    }

    size_type size() const
    {
        return size_;
    }

    size_type capacity() const
    {
        return members_.capacity_;
    }

public:
    pointer data()
    {
        return buffer_;
    }

    const_pointer data() const
    {
        return buffer_;
    }

    allocator_type& get_allocator()
    {
        return static_cast<allocator_type&>(*this);
    }

    const allocator_type& get_allocator() const
    {
        return static_cast<const allocator_type&>(*this);
    }

public:
    iterator begin()
    {
        return buffer_;
    }

    const_iterator begin() const
    {
        return buffer_;
    }

    iterator end()
    {
        return buffer_ + size_;
    }

    const_iterator end() const
    {
        return buffer_ + size_;
    }

    reverse_iterator rbegin()
    {
        return reverse_iterator(end());
    }

    const_reverse_iterator rbegin() const
    {
        return const_reverse_iterator(end());
    }

    reverse_iterator rend()
    {
        return reverse_iterator(begin());
    }

    const_reverse_iterator rend() const
    {
        return const_reverse_iterator(begin());
    }

    const_iterator cbegin() const
    {
        return const_cast<const auto_buffer*>(this)->begin();
    }

    const_iterator cend() const
    {
        return const_cast<const auto_buffer*>(this)->end();
    }

    const_reverse_iterator crbegin() const
    {
        return const_cast<const auto_buffer*>(this)->rbegin();
    }

    const_reverse_iterator crend() const
    {
        return const_cast<const auto_buffer*>(this)->rend();
    }

public:
    reference front()
    {
        return buffer_[0];
    }

    optimized_const_reference front() const
    {
        return buffer_[0];
    }

    reference back()
    {
        return buffer_[size_-1];
    }

    optimized_const_reference back() const
    {
        return buffer_[size_-1];
    }

    reference operator[]( size_type n )
    {
        BOOST_ASSERT( n < size_ );
        return buffer_[n];
    }

    optimized_const_reference operator[]( size_type n ) const
    {
        BOOST_ASSERT( n < size_ );
        return buffer_[n];
    }

    // The unchecked_* family assumes sufficient capacity (asserted,
    // never grown).
    void unchecked_push_back()
    {
        BOOST_ASSERT( !full() );
        new (buffer_ + size_) T;
        ++size_;
    }

    void unchecked_push_back_n( size_type n )
    {
        BOOST_ASSERT( size_ + n <= members_.capacity_ );
        unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
    }

    void unchecked_push_back( optimized_const_reference x ) // non-growing
    {
        BOOST_ASSERT( !full() );
        new (buffer_ + size_) T( x );
        ++size_;
    }

    template< class ForwardIterator >
    void unchecked_push_back( ForwardIterator begin_arg,
                              ForwardIterator end_arg ) // non-growing
    {
        BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
        copy_impl( begin_arg, end_arg, buffer_ + size_ );
        size_ += std::distance(begin_arg, end_arg);
    }

    // Reserve exactly n (no growth-policy rounding).
    void reserve_precisely( size_type n )
    {
        BOOST_ASSERT( members_.capacity_ >= N );

        if( n <= members_.capacity_ )
            return;
        reserve_impl( n );
        BOOST_ASSERT( members_.capacity_ == n );
    }

    void reserve( size_type n ) // strong
    {
        BOOST_ASSERT( members_.capacity_ >= N );

        if( n <= members_.capacity_ )
            return;

        reserve_impl( new_capacity_impl( n ) );
        BOOST_ASSERT( members_.capacity_ >= n );
    }

    void push_back()
    {
        if( size_ != members_.capacity_ )
        {
            unchecked_push_back();
        }
        else
        {
            reserve( size_ + 1u );
            unchecked_push_back();
        }
    }

    void push_back( optimized_const_reference x )
    {
        if( size_ != members_.capacity_ )
        {
            unchecked_push_back( x );
        }
        else
        {
            reserve( size_ + 1u );
            unchecked_push_back( x );
        }
    }

    template< class ForwardIterator >
    void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
    {
        difference_type diff = std::distance(begin_arg, end_arg);
        if( size_ + diff > members_.capacity_ )
            reserve( size_ + diff );
        unchecked_push_back( begin_arg, end_arg );
    }

    iterator insert( const_iterator before, optimized_const_reference x ) // basic
    {
        // @todo: consider if we want to support x in 'this'
        if( size_ < members_.capacity_ )
        {
            bool is_back_insertion = before == cend();
            iterator where = const_cast<T*>(before);

            if( !is_back_insertion )
            {
                grow_back_one();
                // NOTE(review): overlapping forward std::copy — see the
                // remark in the range insert_impl; confirm upstream.
                std::copy( before, cend() - 1u, where + 1u );
                *where = x;
                BOOST_ASSERT( is_valid() );
            }
            else
            {
                unchecked_push_back( x );
            }
            return where;
        }

        auto_buffer temp( new_capacity_impl( size_ + 1u ) );
        temp.unchecked_push_back( cbegin(), before );
        iterator result = temp.end();
        temp.unchecked_push_back( x );
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
        return result;
    }

    void insert( const_iterator before, size_type n,
                 optimized_const_reference x )
    {
        // @todo: see problems above
        if( size_ + n <= members_.capacity_ )
        {
            grow_back( n );
            iterator where = const_cast<T*>(before);
            std::copy( before, cend() - n, where + n );
            std::fill( where, where + n, x );
            BOOST_ASSERT( is_valid() );
            return;
        }

        auto_buffer temp( new_capacity_impl( size_ + n ) );
        temp.unchecked_push_back( cbegin(), before );
        std::uninitialized_fill_n( temp.end(), n, x );
        temp.size_ += n;
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
    }

    // Range insert; dispatches on iterator category (input iterators
    // insert one-by-one, forward iterators can pre-count).
    template< class ForwardIterator >
    void insert( const_iterator before,
                 ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
    {
        typedef typename std::iterator_traits<ForwardIterator>
            ::iterator_category category;
        insert_impl( before, begin_arg, end_arg, category() );
    }

    void pop_back()
    {
        BOOST_ASSERT( !empty() );
        auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
        --size_;
    }

    void pop_back_n( size_type n )
    {
        BOOST_ASSERT( n <= size_ );
        if( n )
        {
            destroy_back_n( n );
            size_ -= n;
        }
    }

    void clear()
    {
        pop_back_n( size_ );
    }

    iterator erase( const_iterator where )
    {
        BOOST_ASSERT( !empty() );
        BOOST_ASSERT( cbegin() <= where );
        BOOST_ASSERT( cend() > where );

        unsigned elements = cend() - where - 1u;

        if( elements > 0u )
        {
            const_iterator start = where + 1u;
            std::copy( start, start + elements,
                       const_cast<T*>(where) );
        }
        pop_back();
        BOOST_ASSERT( !full() );
        iterator result = const_cast<T*>( where );
        BOOST_ASSERT( result <= end() );
        return result;
    }

    iterator erase( const_iterator from, const_iterator to )
    {
        BOOST_ASSERT( !(std::distance(from,to)>0) ||
                      !empty() );
        BOOST_ASSERT( cbegin() <= from );
        BOOST_ASSERT( cend() >= to );

        unsigned elements = std::distance(to,cend());

        if( elements > 0u )
        {
            BOOST_ASSERT( elements > 0u );
            std::copy( to, to + elements,
                       const_cast<T*>(from) );
        }
        pop_back_n( std::distance(from,to) );
        BOOST_ASSERT( !full() );
        iterator result = const_cast<T*>( from );
        BOOST_ASSERT( result <= end() );
        return result;
    }

    void shrink_to_fit()
    {
        if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
            return;

        reserve_impl( size_ );
        members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
        BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
        BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
    }

    // Extend by n without constructing elements; returns a pointer to
    // the first new (raw) slot. Caller must construct them.
    pointer uninitialized_grow( size_type n ) // strong
    {
        if( size_ + n > members_.capacity_ )
            reserve( size_ + n );

        pointer res = end();
        size_ += n;
        return res;
    }

    // Drop n elements without destroying them; caller must already
    // have destroyed (or never constructed) them.
    void uninitialized_shrink( size_type n ) // nothrow
    {
        // @remark: test for wrap-around
        BOOST_ASSERT( size_ - n <= members_.capacity_ );
        size_ -= n;
    }

    void uninitialized_resize( size_type n )
    {
        if( n > size() )
            uninitialized_grow( n - size() );
        else if( n < size() )
            uninitialized_shrink( size() - n );

        BOOST_ASSERT( size() == n );
    }

    // nothrow  - if both buffer are on the heap, or
    //          - if one buffer is on the heap and one has
    //            'has_allocated_buffer() == false', or
    //          - if copy-construction cannot throw
    // basic    - otherwise (better guarantee impossible)
    // requirement: the allocator must be no-throw-swappable
    void swap( auto_buffer& r )
    {
        bool on_stack      = is_on_stack();
        bool r_on_stack    = r.is_on_stack();
        bool both_on_heap  = !on_stack && !r_on_stack;
        if( both_on_heap )
        {
            // Heap/heap: plain pointer and bookkeeping swaps.
            boost::swap( get_allocator(), r.get_allocator() );
            boost::swap( members_.capacity_, r.members_.capacity_ );
            boost::swap( buffer_, r.buffer_ );
            boost::swap( size_, r.size_ );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
            return;
        }

        BOOST_ASSERT( on_stack || r_on_stack );
        bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
                                    (!on_stack && r_on_stack);

        //
        // Remark: we now know that we can copy into
        //         the unused stack buffer.
        //
        if( exactly_one_on_stack )
        {
            auto_buffer* one_on_stack = on_stack ? this : &r;
            auto_buffer* other        = on_stack ? &r : this;
            pointer new_buffer = static_cast<T*>(other->members_.address());
            copy_impl( one_on_stack->begin(), one_on_stack->end(),
                       new_buffer );                            // strong
            one_on_stack->auto_buffer_destroy();                // nothrow
            boost::swap( get_allocator(), r.get_allocator() );  // assume nothrow
            boost::swap( members_.capacity_, r.members_.capacity_ );
            boost::swap( size_, r.size_ );
            one_on_stack->buffer_ = other->buffer_;
            other->buffer_        = new_buffer;
            BOOST_ASSERT( other->is_on_stack() );
            BOOST_ASSERT( !one_on_stack->is_on_stack() );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
            return;
        }

        BOOST_ASSERT( on_stack && r_on_stack );
        swap_helper( *this, r, boost::has_trivial_assign<T>() );
        BOOST_ASSERT( is_valid() );
        BOOST_ASSERT( r.is_valid() );
    }

private:
    // Raw, suitably aligned storage for the N inline objects.
    typedef boost::aligned_storage< N * sizeof(T),
                                    boost::alignment_of<T>::value >
        storage;

    struct members_type : storage /* to enable EBO */
    {
        size_type capacity_;

        members_type( size_type capacity )
            : capacity_(capacity)
        { }

        void* address() const
        { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
    };

    members_type members_;   // inline storage + current capacity
    pointer      buffer_;    // points at members_ or heap memory
    size_type    size_;      // number of constructed elements
};
// ADL swap: defers to the member swap, which handles the
// stack/heap buffer combinations (see its guarantee comments).
template< class T, class SBP, class GP, class A >
inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
{
    l.swap( r );
}
  935. template< class T, class SBP, class GP, class A >
  936. inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
  937. const auto_buffer<T,SBP,GP,A>& r )
  938. {
  939. if( l.size() != r.size() )
  940. return false;
  941. return std::equal( l.begin(), l.end(), r.begin() );
  942. }
  943. template< class T, class SBP, class GP, class A >
  944. inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
  945. const auto_buffer<T,SBP,GP,A>& r )
  946. {
  947. return !(l == r);
  948. }
  949. template< class T, class SBP, class GP, class A >
  950. inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
  951. const auto_buffer<T,SBP,GP,A>& r )
  952. {
  953. return std::lexicographical_compare( l.begin(), l.end(),
  954. r.begin(), r.end() );
  955. }
  956. template< class T, class SBP, class GP, class A >
  957. inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
  958. const auto_buffer<T,SBP,GP,A>& r )
  959. {
  960. return (r < l);
  961. }
  962. template< class T, class SBP, class GP, class A >
  963. inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
  964. const auto_buffer<T,SBP,GP,A>& r )
  965. {
  966. return !(l > r);
  967. }
  968. template< class T, class SBP, class GP, class A >
  969. inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
  970. const auto_buffer<T,SBP,GP,A>& r )
  971. {
  972. return !(l < r);
  973. }
  974. } // namespace detail
  975. } // namespace signals2
  976. }
  977. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  978. #pragma warning(pop)
  979. #endif
  980. #endif