auto_buffer.hpp 36 KB

  1. // Copyright Thorsten Ottosen, 2009.
  2. // Distributed under the Boost Software License, Version 1.0. (See
  3. // accompanying file LICENSE_1_0.txt or copy at
  4. // http://www.boost.org/LICENSE_1_0.txt)
  5. #ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  6. #define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  7. #include <boost/detail/workaround.hpp>
  8. #if defined(_MSC_VER)
  9. # pragma once
  10. #endif
  11. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  12. #pragma warning(push)
  13. #pragma warning(disable:4996)
  14. #endif
  15. #include <boost/assert.hpp>
  16. #include <boost/iterator/reverse_iterator.hpp>
  17. #include <boost/iterator/iterator_traits.hpp>
  18. #include <boost/mpl/if.hpp>
  19. #include <boost/multi_index/detail/scope_guard.hpp>
  20. #include <boost/swap.hpp>
  21. #include <boost/type_traits/aligned_storage.hpp>
  22. #include <boost/type_traits/alignment_of.hpp>
  23. #include <boost/type_traits/has_nothrow_copy.hpp>
  24. #include <boost/type_traits/has_nothrow_assign.hpp>
  25. #include <boost/type_traits/has_trivial_assign.hpp>
  26. #include <boost/type_traits/has_trivial_constructor.hpp>
  27. #include <boost/type_traits/has_trivial_destructor.hpp>
  28. #include <algorithm>
  29. #include <cstring>
  30. #include <iterator>
  31. #include <memory>
  32. #include <stdexcept>
  33. namespace boost
  34. {
  35. namespace signals2
  36. {
  37. namespace detail
  38. {
  39. //
  40. // Policies for creating the stack buffer.
  41. //
//
// Stack-buffer policy: reserve space for exactly N objects of T
// in the internal stack buffer.
//
template< unsigned N >
struct store_n_objects
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
//
// Stack-buffer policy: reserve N raw bytes; the object count is
// derived as N / sizeof(T) (see compute_buffer_objects below).
//
template< unsigned N >
struct store_n_bytes
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
namespace auto_buffer_detail
{
    //
    // Translate a stack-buffer policy into a byte count for the raw
    // aligned storage.  The generic case treats Policy::value as an
    // object count.
    //
    template< class Policy, class T >
    struct compute_buffer_size
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
    };

    // store_n_bytes already specifies bytes directly.
    template< unsigned N, class T >
    struct compute_buffer_size< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };

    //
    // Translate a stack-buffer policy into an object count.  The
    // generic case treats Policy::value as an object count already.
    //
    template< class Policy, class T >
    struct compute_buffer_objects
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
    };

    // For a byte policy, the count is however many whole objects fit
    // (integer division rounds down).
    template< unsigned N, class T >
    struct compute_buffer_objects< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
    };
}
  75. struct default_grow_policy
  76. {
  77. template< class SizeType >
  78. static SizeType new_capacity( SizeType capacity )
  79. {
  80. //
  81. // @remark: we grow the capacity quite agressively.
  82. // this is justified since we aim to minimize
  83. // heap-allocations, and because we mostly use
  84. // the buffer locally.
  85. return capacity * 4u;
  86. }
  87. template< class SizeType >
  88. static bool should_shrink( SizeType, SizeType )
  89. {
  90. //
  91. // @remark: when defining a new grow policy, one might
  92. // choose that if the waated space is less
  93. // than a certain percentage, then it is of
  94. // little use to shrink.
  95. //
  96. return true;
  97. }
  98. };
//
// Small-buffer-optimized, vector-like container.  This declaration
// carries the default template arguments; the definition below
// repeats the parameter list without them.
//
template< class T,
          class StackBufferPolicy = store_n_objects<256>,
          class GrowPolicy        = default_grow_policy,
          class Allocator         = std::allocator<T> >
class auto_buffer;
  104. template
  105. <
  106. class T,
  107. class StackBufferPolicy,
  108. class GrowPolicy,
  109. class Allocator
  110. >
  111. class auto_buffer : Allocator
  112. {
  113. private:
  114. enum { N = auto_buffer_detail::
  115. compute_buffer_objects<StackBufferPolicy,T>::value };
  116. BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );
  117. typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
  118. local_buffer;
  119. public:
  120. typedef Allocator allocator_type;
  121. typedef T value_type;
  122. typedef typename Allocator::size_type size_type;
  123. typedef typename Allocator::difference_type difference_type;
  124. typedef T* pointer;
  125. typedef typename Allocator::pointer allocator_pointer;
  126. typedef const T* const_pointer;
  127. typedef T& reference;
  128. typedef const T& const_reference;
  129. typedef pointer iterator;
  130. typedef const_pointer const_iterator;
  131. typedef boost::reverse_iterator<iterator> reverse_iterator;
  132. typedef boost::reverse_iterator<const_iterator> const_reverse_iterator;
  133. typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
  134. && sizeof(T) <= sizeof(long double),
  135. const value_type,
  136. const_reference >::type
  137. optimized_const_reference;
  138. private:
  139. pointer allocate( size_type capacity_arg )
  140. {
  141. if( capacity_arg > N )
  142. return &*get_allocator().allocate( capacity_arg );
  143. else
  144. return static_cast<T*>( members_.address() );
  145. }
  146. void deallocate( pointer where, size_type capacity_arg )
  147. {
  148. if( capacity_arg <= N )
  149. return;
  150. get_allocator().deallocate( allocator_pointer(where), capacity_arg );
  151. }
  152. template< class I >
  153. static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
  154. {
  155. copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
  156. }
  157. static void copy_rai( const T* begin, const T* end,
  158. pointer where, const boost::true_type& )
  159. {
  160. std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
  161. }
  162. template< class I, bool b >
  163. static void copy_rai( I begin, I end,
  164. pointer where, const boost::integral_constant<bool, b>& )
  165. {
  166. std::uninitialized_copy( begin, end, where );
  167. }
  168. template< class I >
  169. static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
  170. {
  171. std::uninitialized_copy( begin, end, where );
  172. }
  173. template< class I >
  174. static void copy_impl( I begin, I end, pointer where )
  175. {
  176. copy_impl( begin, end, where,
  177. typename std::iterator_traits<I>::iterator_category() );
  178. }
  179. template< class I, class I2 >
  180. static void assign_impl( I begin, I end, I2 where )
  181. {
  182. assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
  183. }
  184. template< class I, class I2 >
  185. static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
  186. {
  187. std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
  188. }
  189. template< class I, class I2 >
  190. static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
  191. {
  192. for( ; begin != end; ++begin, ++where )
  193. *where = *begin;
  194. }
  195. void unchecked_push_back_n( size_type n, const boost::true_type& )
  196. {
  197. std::uninitialized_fill( end(), end() + n, T() );
  198. size_ += n;
  199. }
  200. void unchecked_push_back_n( size_type n, const boost::false_type& )
  201. {
  202. for( size_type i = 0u; i < n; ++i )
  203. unchecked_push_back();
  204. }
  205. void auto_buffer_destroy( pointer where, const boost::false_type& )
  206. {
  207. (*where).~T();
  208. }
  209. void auto_buffer_destroy( pointer, const boost::true_type& )
  210. { }
  211. void auto_buffer_destroy( pointer where )
  212. {
  213. auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
  214. }
  215. void destroy_back_n( size_type n, const boost::false_type& )
  216. {
  217. BOOST_ASSERT( n > 0 );
  218. pointer buffer = buffer_ + size_ - 1u;
  219. pointer new_end = buffer - n;
  220. for( ; buffer > new_end; --buffer )
  221. auto_buffer_destroy( buffer );
  222. }
  223. void destroy_back_n( size_type, const boost::true_type& )
  224. { }
  225. void destroy_back_n( size_type n )
  226. {
  227. destroy_back_n( n, boost::has_trivial_destructor<T>() );
  228. }
  229. void auto_buffer_destroy( const boost::false_type& x )
  230. {
  231. if( size_ )
  232. destroy_back_n( size_, x );
  233. deallocate( buffer_, members_.capacity_ );
  234. }
  235. void auto_buffer_destroy( const boost::true_type& )
  236. {
  237. deallocate( buffer_, members_.capacity_ );
  238. }
  239. pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
  240. {
  241. pointer new_buffer = allocate( new_capacity ); // strong
  242. boost::multi_index::detail::scope_guard guard =
  243. boost::multi_index::detail::make_obj_guard( *this,
  244. &auto_buffer::deallocate,
  245. new_buffer,
  246. new_capacity );
  247. copy_impl( begin(), end(), new_buffer ); // strong
  248. guard.dismiss(); // nothrow
  249. return new_buffer;
  250. }
  251. pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
  252. {
  253. pointer new_buffer = allocate( new_capacity ); // strong
  254. copy_impl( begin(), end(), new_buffer ); // nothrow
  255. return new_buffer;
  256. }
  257. void reserve_impl( size_type new_capacity )
  258. {
  259. pointer new_buffer = move_to_new_buffer( new_capacity,
  260. boost::has_nothrow_copy<T>() );
  261. (*this).~auto_buffer();
  262. buffer_ = new_buffer;
  263. members_.capacity_ = new_capacity;
  264. BOOST_ASSERT( size_ <= members_.capacity_ );
  265. }
  266. size_type new_capacity_impl( size_type n )
  267. {
  268. BOOST_ASSERT( n > members_.capacity_ );
  269. size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
  270. // @todo: consider to check for allocator.max_size()
  271. return (std::max)(new_capacity,n);
  272. }
  273. static void swap_helper( auto_buffer& l, auto_buffer& r,
  274. const boost::true_type& )
  275. {
  276. BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
  277. auto_buffer temp( l.begin(), l.end() );
  278. assign_impl( r.begin(), r.end(), l.begin() );
  279. assign_impl( temp.begin(), temp.end(), r.begin() );
  280. boost::swap( l.size_, r.size_ );
  281. boost::swap( l.members_.capacity_, r.members_.capacity_ );
  282. }
  283. static void swap_helper( auto_buffer& l, auto_buffer& r,
  284. const boost::false_type& )
  285. {
  286. BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
  287. size_type min_size = (std::min)(l.size_,r.size_);
  288. size_type max_size = (std::max)(l.size_,r.size_);
  289. size_type diff = max_size - min_size;
  290. auto_buffer* smallest = l.size_ == min_size ? &l : &r;
  291. auto_buffer* largest = smallest == &l ? &r : &l;
  292. // @remark: the implementation below is not as fast
  293. // as it could be if we assumed T had a default
  294. // constructor.
  295. size_type i = 0u;
  296. for( ; i < min_size; ++i )
  297. boost::swap( (*smallest)[i], (*largest)[i] );
  298. for( ; i < max_size; ++i )
  299. smallest->unchecked_push_back( (*largest)[i] );
  300. largest->pop_back_n( diff );
  301. boost::swap( l.members_.capacity_, r.members_.capacity_ );
  302. }
  303. void one_sided_swap( auto_buffer& temp ) // nothrow
  304. {
  305. BOOST_ASSERT( !temp.is_on_stack() );
  306. this->~auto_buffer();
  307. // @remark: must be nothrow
  308. get_allocator() = temp.get_allocator();
  309. members_.capacity_ = temp.members_.capacity_;
  310. buffer_ = temp.buffer_;
  311. BOOST_ASSERT( temp.size_ >= size_ + 1u );
  312. size_ = temp.size_;
  313. temp.buffer_ = 0;
  314. BOOST_ASSERT( temp.is_valid() );
  315. }
  316. template< class I >
  317. void insert_impl( const_iterator before, I begin_arg, I end_arg,
  318. std::input_iterator_tag )
  319. {
  320. for( ; begin_arg != end_arg; ++begin_arg )
  321. {
  322. before = insert( before, *begin_arg );
  323. ++before;
  324. }
  325. }
  326. void grow_back( size_type n, const boost::true_type& )
  327. {
  328. BOOST_ASSERT( size_ + n <= members_.capacity_ );
  329. size_ += n;
  330. }
  331. void grow_back( size_type n, const boost::false_type& )
  332. {
  333. unchecked_push_back_n(n);
  334. }
  335. void grow_back( size_type n )
  336. {
  337. grow_back( n, boost::has_trivial_constructor<T>() );
  338. }
  339. void grow_back_one( const boost::true_type& )
  340. {
  341. BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
  342. size_ += 1;
  343. }
  344. void grow_back_one( const boost::false_type& )
  345. {
  346. unchecked_push_back();
  347. }
  348. void grow_back_one()
  349. {
  350. grow_back_one( boost::has_trivial_constructor<T>() );
  351. }
  352. template< class I >
  353. void insert_impl( const_iterator before, I begin_arg, I end_arg,
  354. std::forward_iterator_tag )
  355. {
  356. difference_type n = std::distance(begin_arg, end_arg);
  357. if( size_ + n <= members_.capacity_ )
  358. {
  359. bool is_back_insertion = before == cend();
  360. if( !is_back_insertion )
  361. {
  362. grow_back( n );
  363. iterator where = const_cast<T*>(before);
  364. std::copy( before, cend() - n, where + n );
  365. assign_impl( begin_arg, end_arg, where );
  366. }
  367. else
  368. {
  369. unchecked_push_back( begin_arg, end_arg );
  370. }
  371. BOOST_ASSERT( is_valid() );
  372. return;
  373. }
  374. auto_buffer temp( new_capacity_impl( size_ + n ) );
  375. temp.unchecked_push_back( cbegin(), before );
  376. temp.unchecked_push_back( begin_arg, end_arg );
  377. temp.unchecked_push_back( before, cend() );
  378. one_sided_swap( temp );
  379. BOOST_ASSERT( is_valid() );
  380. }
  381. public:
  382. bool is_valid() const // invariant
  383. {
  384. // @remark: allowed for N==0 and when
  385. // using a locally instance
  386. // in insert()/one_sided_swap()
  387. if( buffer_ == 0 )
  388. return true;
  389. if( members_.capacity_ < N )
  390. return false;
  391. if( !is_on_stack() && members_.capacity_ <= N )
  392. return false;
  393. if( buffer_ == members_.address() )
  394. if( members_.capacity_ > N )
  395. return false;
  396. if( size_ > members_.capacity_ )
  397. return false;
  398. return true;
  399. }
  400. auto_buffer()
  401. : members_( N ),
  402. buffer_( static_cast<T*>(members_.address()) ),
  403. size_( 0u )
  404. {
  405. BOOST_ASSERT( is_valid() );
  406. }
  407. auto_buffer( const auto_buffer& r )
  408. : members_( (std::max)(r.size_,size_type(N)) ),
  409. buffer_( allocate( members_.capacity_ ) ),
  410. size_( 0 )
  411. {
  412. copy_impl( r.begin(), r.end(), buffer_ );
  413. size_ = r.size_;
  414. BOOST_ASSERT( is_valid() );
  415. }
  416. auto_buffer& operator=( const auto_buffer& r ) // basic
  417. {
  418. if( this == &r )
  419. return *this;
  420. difference_type diff = size_ - r.size_;
  421. if( diff >= 0 )
  422. {
  423. pop_back_n( static_cast<size_type>(diff) );
  424. assign_impl( r.begin(), r.end(), begin() );
  425. }
  426. else
  427. {
  428. if( members_.capacity_ >= r.size() )
  429. {
  430. unchecked_push_back_n( static_cast<size_type>(-diff) );
  431. assign_impl( r.begin(), r.end(), begin() );
  432. }
  433. else
  434. {
  435. // @remark: we release memory as early as possible
  436. // since we only give the basic guarantee
  437. (*this).~auto_buffer();
  438. buffer_ = 0;
  439. pointer new_buffer = allocate( r.size() );
  440. boost::multi_index::detail::scope_guard guard =
  441. boost::multi_index::detail::make_obj_guard( *this,
  442. &auto_buffer::deallocate,
  443. new_buffer,
  444. r.size() );
  445. copy_impl( r.begin(), r.end(), new_buffer );
  446. guard.dismiss();
  447. buffer_ = new_buffer;
  448. members_.capacity_ = r.size();
  449. size_ = members_.capacity_;
  450. }
  451. }
  452. BOOST_ASSERT( size() == r.size() );
  453. BOOST_ASSERT( is_valid() );
  454. return *this;
  455. }
  456. explicit auto_buffer( size_type capacity_arg )
  457. : members_( (std::max)(capacity_arg, size_type(N)) ),
  458. buffer_( allocate(members_.capacity_) ),
  459. size_( 0 )
  460. {
  461. BOOST_ASSERT( is_valid() );
  462. }
  463. auto_buffer( size_type size_arg, optimized_const_reference init_value )
  464. : members_( (std::max)(size_arg, size_type(N)) ),
  465. buffer_( allocate(members_.capacity_) ),
  466. size_( 0 )
  467. {
  468. std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
  469. size_ = size_arg;
  470. BOOST_ASSERT( is_valid() );
  471. }
  472. auto_buffer( size_type capacity_arg, const allocator_type& a )
  473. : allocator_type( a ),
  474. members_( (std::max)(capacity_arg, size_type(N)) ),
  475. buffer_( allocate(members_.capacity_) ),
  476. size_( 0 )
  477. {
  478. BOOST_ASSERT( is_valid() );
  479. }
  480. auto_buffer( size_type size_arg, optimized_const_reference init_value,
  481. const allocator_type& a )
  482. : allocator_type( a ),
  483. members_( (std::max)(size_arg, size_type(N)) ),
  484. buffer_( allocate(members_.capacity_) ),
  485. size_( 0 )
  486. {
  487. std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
  488. size_ = size_arg;
  489. BOOST_ASSERT( is_valid() );
  490. }
  491. template< class ForwardIterator >
  492. auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
  493. :
  494. members_( std::distance(begin_arg, end_arg) ),
  495. buffer_( allocate(members_.capacity_) ),
  496. size_( 0 )
  497. {
  498. copy_impl( begin_arg, end_arg, buffer_ );
  499. size_ = members_.capacity_;
  500. if( members_.capacity_ < N )
  501. members_.capacity_ = N;
  502. BOOST_ASSERT( is_valid() );
  503. }
  504. template< class ForwardIterator >
  505. auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
  506. const allocator_type& a )
  507. : allocator_type( a ),
  508. members_( std::distance(begin_arg, end_arg) ),
  509. buffer_( allocate(members_.capacity_) ),
  510. size_( 0 )
  511. {
  512. copy_impl( begin_arg, end_arg, buffer_ );
  513. size_ = members_.capacity_;
  514. if( members_.capacity_ < N )
  515. members_.capacity_ = N;
  516. BOOST_ASSERT( is_valid() );
  517. }
  518. ~auto_buffer()
  519. {
  520. BOOST_ASSERT( is_valid() );
  521. if( buffer_ ) // do we need this check? Yes, but only
  522. // for N = 0u + local instances in one_sided_swap()
  523. auto_buffer_destroy( boost::has_trivial_destructor<T>() );
  524. }
  525. public:
  526. bool empty() const
  527. {
  528. return size_ == 0;
  529. }
  530. bool full() const
  531. {
  532. return size_ == members_.capacity_;
  533. }
  534. bool is_on_stack() const
  535. {
  536. return members_.capacity_ <= N;
  537. }
  538. size_type size() const
  539. {
  540. return size_;
  541. }
  542. size_type capacity() const
  543. {
  544. return members_.capacity_;
  545. }
  546. public:
  547. pointer data()
  548. {
  549. return buffer_;
  550. }
  551. const_pointer data() const
  552. {
  553. return buffer_;
  554. }
  555. allocator_type& get_allocator()
  556. {
  557. return static_cast<allocator_type&>(*this);
  558. }
  559. const allocator_type& get_allocator() const
  560. {
  561. return static_cast<const allocator_type&>(*this);
  562. }
  563. public:
  564. iterator begin()
  565. {
  566. return buffer_;
  567. }
  568. const_iterator begin() const
  569. {
  570. return buffer_;
  571. }
  572. iterator end()
  573. {
  574. return buffer_ + size_;
  575. }
  576. const_iterator end() const
  577. {
  578. return buffer_ + size_;
  579. }
  580. reverse_iterator rbegin()
  581. {
  582. return reverse_iterator(end());
  583. }
  584. const_reverse_iterator rbegin() const
  585. {
  586. return const_reverse_iterator(end());
  587. }
  588. reverse_iterator rend()
  589. {
  590. return reverse_iterator(begin());
  591. }
  592. const_reverse_iterator rend() const
  593. {
  594. return const_reverse_iterator(begin());
  595. }
  596. const_iterator cbegin() const
  597. {
  598. return const_cast<const auto_buffer*>(this)->begin();
  599. }
  600. const_iterator cend() const
  601. {
  602. return const_cast<const auto_buffer*>(this)->end();
  603. }
  604. const_reverse_iterator crbegin() const
  605. {
  606. return const_cast<const auto_buffer*>(this)->rbegin();
  607. }
  608. const_reverse_iterator crend() const
  609. {
  610. return const_cast<const auto_buffer*>(this)->rend();
  611. }
  612. public:
  613. reference front()
  614. {
  615. return buffer_[0];
  616. }
  617. optimized_const_reference front() const
  618. {
  619. return buffer_[0];
  620. }
  621. reference back()
  622. {
  623. return buffer_[size_-1];
  624. }
  625. optimized_const_reference back() const
  626. {
  627. return buffer_[size_-1];
  628. }
  629. reference operator[]( size_type n )
  630. {
  631. BOOST_ASSERT( n < size_ );
  632. return buffer_[n];
  633. }
  634. optimized_const_reference operator[]( size_type n ) const
  635. {
  636. BOOST_ASSERT( n < size_ );
  637. return buffer_[n];
  638. }
  639. void unchecked_push_back()
  640. {
  641. BOOST_ASSERT( !full() );
  642. new (buffer_ + size_) T;
  643. ++size_;
  644. }
  645. void unchecked_push_back_n( size_type n )
  646. {
  647. BOOST_ASSERT( size_ + n <= members_.capacity_ );
  648. unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
  649. }
  650. void unchecked_push_back( optimized_const_reference x ) // non-growing
  651. {
  652. BOOST_ASSERT( !full() );
  653. new (buffer_ + size_) T( x );
  654. ++size_;
  655. }
  656. template< class ForwardIterator >
  657. void unchecked_push_back( ForwardIterator begin_arg,
  658. ForwardIterator end_arg ) // non-growing
  659. {
  660. BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
  661. copy_impl( begin_arg, end_arg, buffer_ + size_ );
  662. size_ += std::distance(begin_arg, end_arg);
  663. }
  664. void reserve_precisely( size_type n )
  665. {
  666. BOOST_ASSERT( members_.capacity_ >= N );
  667. if( n <= members_.capacity_ )
  668. return;
  669. reserve_impl( n );
  670. BOOST_ASSERT( members_.capacity_ == n );
  671. }
  672. void reserve( size_type n ) // strong
  673. {
  674. BOOST_ASSERT( members_.capacity_ >= N );
  675. if( n <= members_.capacity_ )
  676. return;
  677. reserve_impl( new_capacity_impl( n ) );
  678. BOOST_ASSERT( members_.capacity_ >= n );
  679. }
  680. void push_back()
  681. {
  682. if( size_ != members_.capacity_ )
  683. {
  684. unchecked_push_back();
  685. }
  686. else
  687. {
  688. reserve( size_ + 1u );
  689. unchecked_push_back();
  690. }
  691. }
  692. void push_back( optimized_const_reference x )
  693. {
  694. if( size_ != members_.capacity_ )
  695. {
  696. unchecked_push_back( x );
  697. }
  698. else
  699. {
  700. reserve( size_ + 1u );
  701. unchecked_push_back( x );
  702. }
  703. }
  704. template< class ForwardIterator >
  705. void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
  706. {
  707. difference_type diff = std::distance(begin_arg, end_arg);
  708. if( size_ + diff > members_.capacity_ )
  709. reserve( size_ + diff );
  710. unchecked_push_back( begin_arg, end_arg );
  711. }
  712. iterator insert( const_iterator before, optimized_const_reference x ) // basic
  713. {
  714. // @todo: consider if we want to support x in 'this'
  715. if( size_ < members_.capacity_ )
  716. {
  717. bool is_back_insertion = before == cend();
  718. iterator where = const_cast<T*>(before);
  719. if( !is_back_insertion )
  720. {
  721. grow_back_one();
  722. std::copy( before, cend() - 1u, where + 1u );
  723. *where = x;
  724. BOOST_ASSERT( is_valid() );
  725. }
  726. else
  727. {
  728. unchecked_push_back( x );
  729. }
  730. return where;
  731. }
  732. auto_buffer temp( new_capacity_impl( size_ + 1u ) );
  733. temp.unchecked_push_back( cbegin(), before );
  734. iterator result = temp.end();
  735. temp.unchecked_push_back( x );
  736. temp.unchecked_push_back( before, cend() );
  737. one_sided_swap( temp );
  738. BOOST_ASSERT( is_valid() );
  739. return result;
  740. }
  741. void insert( const_iterator before, size_type n,
  742. optimized_const_reference x )
  743. {
  744. // @todo: see problems above
  745. if( size_ + n <= members_.capacity_ )
  746. {
  747. grow_back( n );
  748. iterator where = const_cast<T*>(before);
  749. std::copy( before, cend() - n, where + n );
  750. std::fill( where, where + n, x );
  751. BOOST_ASSERT( is_valid() );
  752. return;
  753. }
  754. auto_buffer temp( new_capacity_impl( size_ + n ) );
  755. temp.unchecked_push_back( cbegin(), before );
  756. std::uninitialized_fill_n( temp.end(), n, x );
  757. temp.size_ += n;
  758. temp.unchecked_push_back( before, cend() );
  759. one_sided_swap( temp );
  760. BOOST_ASSERT( is_valid() );
  761. }
  762. template< class ForwardIterator >
  763. void insert( const_iterator before,
  764. ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
  765. {
  766. typedef typename std::iterator_traits<ForwardIterator>
  767. ::iterator_category category;
  768. insert_impl( before, begin_arg, end_arg, category() );
  769. }
  770. void pop_back()
  771. {
  772. BOOST_ASSERT( !empty() );
  773. auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
  774. --size_;
  775. }
  776. void pop_back_n( size_type n )
  777. {
  778. BOOST_ASSERT( n <= size_ );
  779. if( n )
  780. {
  781. destroy_back_n( n );
  782. size_ -= n;
  783. }
  784. }
  785. void clear()
  786. {
  787. pop_back_n( size_ );
  788. }
  789. iterator erase( const_iterator where )
  790. {
  791. BOOST_ASSERT( !empty() );
  792. BOOST_ASSERT( cbegin() <= where );
  793. BOOST_ASSERT( cend() > where );
  794. unsigned elements = cend() - where - 1u;
  795. if( elements > 0u )
  796. {
  797. const_iterator start = where + 1u;
  798. std::copy( start, start + elements,
  799. const_cast<T*>(where) );
  800. }
  801. pop_back();
  802. BOOST_ASSERT( !full() );
  803. iterator result = const_cast<T*>( where );
  804. BOOST_ASSERT( result <= end() );
  805. return result;
  806. }
  807. iterator erase( const_iterator from, const_iterator to )
  808. {
  809. BOOST_ASSERT( !(std::distance(from,to)>0) ||
  810. !empty() );
  811. BOOST_ASSERT( cbegin() <= from );
  812. BOOST_ASSERT( cend() >= to );
  813. unsigned elements = std::distance(to,cend());
  814. if( elements > 0u )
  815. {
  816. BOOST_ASSERT( elements > 0u );
  817. std::copy( to, to + elements,
  818. const_cast<T*>(from) );
  819. }
  820. pop_back_n( std::distance(from,to) );
  821. BOOST_ASSERT( !full() );
  822. iterator result = const_cast<T*>( from );
  823. BOOST_ASSERT( result <= end() );
  824. return result;
  825. }
  826. void shrink_to_fit()
  827. {
  828. if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
  829. return;
  830. reserve_impl( size_ );
  831. members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
  832. BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
  833. BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
  834. }
  835. pointer uninitialized_grow( size_type n ) // strong
  836. {
  837. if( size_ + n <= members_.capacity_ )
  838. reserve( size_ + n );
  839. pointer res = end();
  840. size_ += n;
  841. return res;
  842. }
  843. void uninitialized_shrink( size_type n ) // nothrow
  844. {
  845. // @remark: test for wrap-around
  846. BOOST_ASSERT( size_ - n <= members_.capacity_ );
  847. size_ -= n;
  848. }
  849. void uninitialized_resize( size_type n )
  850. {
  851. if( n > size() )
  852. uninitialized_grow( n - size() );
  853. else if( n < size() )
  854. uninitialized_shrink( size() - n );
  855. BOOST_ASSERT( size() == n );
  856. }
  857. // nothrow - if both buffer are on the heap, or
  858. // - if one buffer is on the heap and one has
  859. // 'has_allocated_buffer() == false', or
  860. // - if copy-construction cannot throw
  861. // basic - otherwise (better guarantee impossible)
  862. // requirement: the allocator must be no-throw-swappable
  863. void swap( auto_buffer& r )
  864. {
  865. bool on_stack = is_on_stack();
  866. bool r_on_stack = r.is_on_stack();
  867. bool both_on_heap = !on_stack && !r_on_stack;
  868. if( both_on_heap )
  869. {
  870. boost::swap( get_allocator(), r.get_allocator() );
  871. boost::swap( members_.capacity_, r.members_.capacity_ );
  872. boost::swap( buffer_, r.buffer_ );
  873. boost::swap( size_, r.size_ );
  874. BOOST_ASSERT( is_valid() );
  875. BOOST_ASSERT( r.is_valid() );
  876. return;
  877. }
  878. BOOST_ASSERT( on_stack || r_on_stack );
  879. bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
  880. (!on_stack && r_on_stack);
  881. //
  882. // Remark: we now know that we can copy into
  883. // the unused stack buffer.
  884. //
  885. if( exactly_one_on_stack )
  886. {
  887. auto_buffer* one_on_stack = on_stack ? this : &r;
  888. auto_buffer* other = on_stack ? &r : this;
  889. pointer new_buffer = static_cast<T*>(other->members_.address());
  890. copy_impl( one_on_stack->begin(), one_on_stack->end(),
  891. new_buffer ); // strong
  892. one_on_stack->~auto_buffer(); // nothrow
  893. boost::swap( get_allocator(), r.get_allocator() ); // assume nothrow
  894. boost::swap( members_.capacity_, r.members_.capacity_ );
  895. boost::swap( size_, r.size_ );
  896. one_on_stack->buffer_ = other->buffer_;
  897. other->buffer_ = new_buffer;
  898. BOOST_ASSERT( other->is_on_stack() );
  899. BOOST_ASSERT( !one_on_stack->is_on_stack() );
  900. BOOST_ASSERT( is_valid() );
  901. BOOST_ASSERT( r.is_valid() );
  902. return;
  903. }
  904. BOOST_ASSERT( on_stack && r_on_stack );
  905. swap_helper( *this, r, boost::has_trivial_assign<T>() );
  906. BOOST_ASSERT( is_valid() );
  907. BOOST_ASSERT( r.is_valid() );
  908. }
  909. private:
  910. typedef boost::aligned_storage< N * sizeof(T),
  911. boost::alignment_of<T>::value >
  912. storage;
  913. struct members_type : storage /* to enable EBO */
  914. {
  915. size_type capacity_;
  916. members_type( size_type capacity )
  917. : capacity_(capacity)
  918. { }
  919. void* address() const
  920. { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
  921. };
  922. members_type members_;
  923. pointer buffer_;
  924. size_type size_;
  925. };
//
// ADL-found swap; forwards to the member swap().
//
template< class T, class SBP, class GP, class A >
inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
{
    l.swap( r );
}
  931. template< class T, class SBP, class GP, class A >
  932. inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
  933. const auto_buffer<T,SBP,GP,A>& r )
  934. {
  935. if( l.size() != r.size() )
  936. return false;
  937. return std::equal( l.begin(), l.end(), r.begin() );
  938. }
//
// Defined in terms of operator==.
//
template< class T, class SBP, class GP, class A >
inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
                        const auto_buffer<T,SBP,GP,A>& r )
{
    return !(l == r);
}
//
// Element-wise lexicographical ordering.
//
template< class T, class SBP, class GP, class A >
inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
                       const auto_buffer<T,SBP,GP,A>& r )
{
    return std::lexicographical_compare( l.begin(), l.end(),
                                         r.begin(), r.end() );
}
//
// Defined in terms of operator<.
//
template< class T, class SBP, class GP, class A >
inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
                       const auto_buffer<T,SBP,GP,A>& r )
{
    return (r < l);
}
//
// Defined in terms of operator> (itself defined via operator<).
//
template< class T, class SBP, class GP, class A >
inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
                        const auto_buffer<T,SBP,GP,A>& r )
{
    return !(r > l);
}
//
// Defined in terms of operator<.
//
template< class T, class SBP, class GP, class A >
inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
                        const auto_buffer<T,SBP,GP,A>& r )
{
    return !(l < r);
}
  970. } // namespace detail
  971. } // namespace signals2
  972. }
  973. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  974. #pragma warning(pop)
  975. #endif
  976. #endif