// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_CONTAINERS_CIRCULAR_DEQUE_H_
#define BASE_CONTAINERS_CIRCULAR_DEQUE_H_

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <type_traits>
#include <utility>

#include "base/check_op.h"
#include "base/containers/vector_buffer.h"
#include "base/macros.h"
#include "base/stl_util.h"
#include "base/template_util.h"

// base::circular_deque is similar to std::deque. Unlike std::deque, the
// storage is provided in a flat circular buffer conceptually similar to a
// vector. The beginning and end will wrap around as necessary so that
// pushes and pops will be constant time as long as a capacity expansion is
// not required.
//
// The API should be identical to std::deque with the following differences:
//
// - ITERATORS ARE NOT STABLE. Mutating the container will invalidate all
//   iterators.
//
// - Insertions may resize the vector and so are not constant time (std::deque
//   guarantees constant time for insertions at the ends).
//
// - Container-wide comparisons are not implemented. If you want to compare
//   two containers, use an algorithm so the expensive iteration is explicit.
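//   For example (illustrative), an explicit element-wise comparison of two
//   deques |a| and |b| can be written with <algorithm>:
//     std::equal(a.begin(), a.end(), b.begin(), b.end());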
//
// If you want a similar container with only a queue API, use base::queue in
// base/containers/queue.h.
//
// Constructors:
//   circular_deque();
//   circular_deque(size_t count);
//   circular_deque(size_t count, const T& value);
//   circular_deque(InputIterator first, InputIterator last);
//   circular_deque(const circular_deque&);
//   circular_deque(circular_deque&&);
//   circular_deque(std::initializer_list<value_type>);
//
// Assignment functions:
//   circular_deque& operator=(const circular_deque&);
//   circular_deque& operator=(circular_deque&&);
//   circular_deque& operator=(std::initializer_list<T>);
//   void assign(size_t count, const T& value);
//   void assign(InputIterator first, InputIterator last);
//   void assign(std::initializer_list<T> value);
//
// Random accessors:
//   T& at(size_t);
//   const T& at(size_t) const;
//   T& operator[](size_t);
//   const T& operator[](size_t) const;
//
// End accessors:
//   T& front();
//   const T& front() const;
//   T& back();
//   const T& back() const;
//
// Iterator functions:
//   iterator begin();
//   const_iterator begin() const;
//   const_iterator cbegin() const;
//   iterator end();
//   const_iterator end() const;
//   const_iterator cend() const;
//   reverse_iterator rbegin();
//   const_reverse_iterator rbegin() const;
//   const_reverse_iterator crbegin() const;
//   reverse_iterator rend();
//   const_reverse_iterator rend() const;
//   const_reverse_iterator crend() const;
//
// Memory management:
//   void reserve(size_t);  // SEE IMPLEMENTATION FOR SOME GOTCHAS.
//   size_t capacity() const;
//   void shrink_to_fit();
//
// Size management:
//   void clear();
//   bool empty() const;
//   size_t size() const;
//   void resize(size_t);
//   void resize(size_t count, const T& value);
//
// Positional insert and erase:
//   void insert(const_iterator pos, size_type count, const T& value);
//   void insert(const_iterator pos,
//               InputIterator first, InputIterator last);
//   iterator insert(const_iterator pos, const T& value);
//   iterator insert(const_iterator pos, T&& value);
//   iterator emplace(const_iterator pos, Args&&... args);
//   iterator erase(const_iterator pos);
//   iterator erase(const_iterator first, const_iterator last);
//
// End insert and erase:
//   void push_front(const T&);
//   void push_front(T&&);
//   void push_back(const T&);
//   void push_back(T&&);
//   T& emplace_front(Args&&...);
//   T& emplace_back(Args&&...);
//   void pop_front();
//   void pop_back();
//
// General:
//   void swap(circular_deque&);
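//
// Example usage (illustrative, using only the API documented above):
//
//   base::circular_deque<int> queue;
//   queue.push_back(1);
//   queue.push_back(2);
//   queue.push_front(0);        // Contents are now {0, 1, 2}.
//   int first = queue.front();  // 0
//   queue.pop_front();          // Contents are now {1, 2}.
//
// Remember that every mutation above invalidates any outstanding iterators.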

namespace base {

template <class T>
class circular_deque;

namespace internal {

// Start allocating nonempty buffers with this many entries. This is the
// external capacity so the internal buffer will be one larger (= 4) which is
// more even for the allocator. See the descriptions of internal vs. external
// capacity on the comment above the buffer_ variable below.
constexpr size_t kCircularBufferInitialCapacity = 3;

template <typename T>
class circular_deque_const_iterator {
 public:
  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = const T*;
  using reference = const T&;
  using iterator_category = std::random_access_iterator_tag;

  circular_deque_const_iterator() : parent_deque_(nullptr), index_(0) {
#if DCHECK_IS_ON()
    created_generation_ = 0;
#endif  // DCHECK_IS_ON()
  }

  // Dereferencing.
  const T& operator*() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return parent_deque_->buffer_[index_];
  }
  const T* operator->() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return &parent_deque_->buffer_[index_];
  }
  const value_type& operator[](difference_type i) const {
    return *(*this + i);
  }

  // Increment and decrement.
  circular_deque_const_iterator& operator++() {
    Increment();
    return *this;
  }
  circular_deque_const_iterator operator++(int) {
    circular_deque_const_iterator ret = *this;
    Increment();
    return ret;
  }
  circular_deque_const_iterator& operator--() {
    Decrement();
    return *this;
  }
  circular_deque_const_iterator operator--(int) {
    circular_deque_const_iterator ret = *this;
    Decrement();
    return ret;
  }

  // Random access mutation.
  friend circular_deque_const_iterator operator+(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_const_iterator& operator+=(difference_type offset) {
    Add(offset);
    return *this;
  }
  friend circular_deque_const_iterator operator-(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_const_iterator& operator-=(difference_type offset) {
    Add(-offset);
    return *this;
  }

  friend std::ptrdiff_t operator-(const circular_deque_const_iterator& lhs,
                                  const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() - rhs.OffsetFromBegin();
  }

  // Comparisons.
  friend bool operator==(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.index_ == rhs.index_;
  }
  friend bool operator!=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs == rhs);
  }
  friend bool operator<(const circular_deque_const_iterator& lhs,
                        const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() < rhs.OffsetFromBegin();
  }
  friend bool operator<=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs > rhs);
  }
  friend bool operator>(const circular_deque_const_iterator& lhs,
                        const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() > rhs.OffsetFromBegin();
  }
  friend bool operator>=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs < rhs);
  }

 protected:
  friend class circular_deque<T>;

  circular_deque_const_iterator(const circular_deque<T>* parent, size_t index)
      : parent_deque_(parent), index_(index) {
#if DCHECK_IS_ON()
    created_generation_ = parent->generation_;
#endif  // DCHECK_IS_ON()
  }

  // Returns the offset from the beginning index of the buffer to the current
  // item.
  size_t OffsetFromBegin() const {
    if (index_ >= parent_deque_->begin_)
      return index_ - parent_deque_->begin_;  // On the same side as begin.
    return parent_deque_->buffer_.capacity() - parent_deque_->begin_ + index_;
  }

  // Most uses will be ++ and -- so use a simplified implementation.
  void Increment() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    index_++;
    if (index_ == parent_deque_->buffer_.capacity())
      index_ = 0;
  }
  void Decrement() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndexOrEnd(index_);
    if (index_ == 0)
      index_ = parent_deque_->buffer_.capacity() - 1;
    else
      index_--;
  }
  void Add(difference_type delta) {
    CheckUnstableUsage();
#if DCHECK_IS_ON()
    if (delta <= 0)
      parent_deque_->CheckValidIndexOrEnd(index_);
    else
      parent_deque_->CheckValidIndex(index_);
#endif
    // It should be valid to add 0 to any iterator, even if the container is
    // empty and the iterator points to end(). The modulo below will divide
    // by 0 if the buffer capacity is empty, so it's important to check for
    // this case explicitly.
    if (delta == 0)
      return;
    difference_type new_offset = OffsetFromBegin() + delta;
    DCHECK(new_offset >= 0 &&
           new_offset <= static_cast<difference_type>(parent_deque_->size()));
    index_ = (new_offset + parent_deque_->begin_) %
             parent_deque_->buffer_.capacity();
  }

#if DCHECK_IS_ON()
  void CheckUnstableUsage() const {
    DCHECK(parent_deque_);
    // Since circular_deque doesn't guarantee stability, any attempt to
    // dereference this iterator after a mutation (i.e. the generation doesn't
    // match the original) in the container is illegal.
    DCHECK_EQ(created_generation_, parent_deque_->generation_)
        << "circular_deque iterator dereferenced after mutation.";
  }
  void CheckComparable(const circular_deque_const_iterator& other) const {
    DCHECK_EQ(parent_deque_, other.parent_deque_);
    // Since circular_deque doesn't guarantee stability, two iterators that
    // are compared must have been generated without mutating the container.
    // If this fires, the container was mutated between generating the two
    // iterators being compared.
    DCHECK_EQ(created_generation_, other.created_generation_);
  }
#else
  inline void CheckUnstableUsage() const {}
  inline void CheckComparable(const circular_deque_const_iterator&) const {}
#endif  // DCHECK_IS_ON()

  const circular_deque<T>* parent_deque_;
  size_t index_;

#if DCHECK_IS_ON()
  // The generation of the parent deque when this iterator was created. The
  // container will update the generation for every modification so we can
  // test if the container was modified by comparing them.
  uint64_t created_generation_;
#endif  // DCHECK_IS_ON()
};

template <typename T>
class circular_deque_iterator : public circular_deque_const_iterator<T> {
  using base = circular_deque_const_iterator<T>;

 public:
  friend class circular_deque<T>;

  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = T*;
  using reference = T&;
  using iterator_category = std::random_access_iterator_tag;

  // Expose the base class' constructor.
  circular_deque_iterator() : circular_deque_const_iterator<T>() {}

  // Dereferencing.
  T& operator*() const { return const_cast<T&>(base::operator*()); }
  T* operator->() const { return const_cast<T*>(base::operator->()); }
  T& operator[](difference_type i) {
    return const_cast<T&>(base::operator[](i));
  }

  // Random access mutation.
  friend circular_deque_iterator operator+(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_iterator& operator+=(difference_type offset) {
    base::Add(offset);
    return *this;
  }
  friend circular_deque_iterator operator-(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_iterator& operator-=(difference_type offset) {
    base::Add(-offset);
    return *this;
  }

  // Increment and decrement.
  circular_deque_iterator& operator++() {
    base::Increment();
    return *this;
  }
  circular_deque_iterator operator++(int) {
    circular_deque_iterator ret = *this;
    base::Increment();
    return ret;
  }
  circular_deque_iterator& operator--() {
    base::Decrement();
    return *this;
  }
  circular_deque_iterator operator--(int) {
    circular_deque_iterator ret = *this;
    base::Decrement();
    return ret;
  }

 private:
  circular_deque_iterator(const circular_deque<T>* parent, size_t index)
      : circular_deque_const_iterator<T>(parent, index) {}
};

}  // namespace internal

template <typename T>
class circular_deque {
 private:
  using VectorBuffer = internal::VectorBuffer<T>;

 public:
  using value_type = T;
  using size_type = std::size_t;
  using difference_type = std::ptrdiff_t;

  using reference = value_type&;
  using const_reference = const value_type&;
  using pointer = value_type*;
  using const_pointer = const value_type*;

  using iterator = internal::circular_deque_iterator<T>;
  using const_iterator = internal::circular_deque_const_iterator<T>;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;

  // ---------------------------------------------------------------------------
  // Constructor

  constexpr circular_deque() = default;

  // Constructs with |count| copies of |value| or default constructed version.
  circular_deque(size_type count) { resize(count); }
  circular_deque(size_type count, const T& value) { resize(count, value); }

  // Range constructor.
  template <class InputIterator>
  circular_deque(InputIterator first, InputIterator last) {
    assign(first, last);
  }

  // Copy/move.
  circular_deque(const circular_deque& other) : buffer_(other.size() + 1) {
    assign(other.begin(), other.end());
  }
  circular_deque(circular_deque&& other) noexcept
      : buffer_(std::move(other.buffer_)),
        begin_(other.begin_),
        end_(other.end_) {
    other.begin_ = 0;
    other.end_ = 0;
  }

  circular_deque(std::initializer_list<value_type> init) { assign(init); }

  ~circular_deque() { DestructRange(begin_, end_); }

  // ---------------------------------------------------------------------------
  // Assignments.
  //
  // All of these may invalidate iterators and references.

  circular_deque& operator=(const circular_deque& other) {
    if (&other == this)
      return *this;

    reserve(other.size());
    assign(other.begin(), other.end());
    return *this;
  }
  circular_deque& operator=(circular_deque&& other) noexcept {
    if (&other == this)
      return *this;

    // We're about to overwrite the buffer, so don't free it in clear to
    // avoid doing it twice.
    ClearRetainCapacity();
    buffer_ = std::move(other.buffer_);
    begin_ = other.begin_;
    end_ = other.end_;

    other.begin_ = 0;
    other.end_ = 0;

    IncrementGeneration();
    return *this;
  }
  circular_deque& operator=(std::initializer_list<value_type> ilist) {
    reserve(ilist.size());
    assign(std::begin(ilist), std::end(ilist));
    return *this;
  }

  void assign(size_type count, const value_type& value) {
    ClearRetainCapacity();
    reserve(count);
    for (size_t i = 0; i < count; i++)
      emplace_back(value);
    IncrementGeneration();
  }

  // This variant should be enabled only when InputIterator is an iterator.
  template <typename InputIterator>
  typename std::enable_if<::base::internal::is_iterator<InputIterator>::value,
                          void>::type
  assign(InputIterator first, InputIterator last) {
    // Possible future enhancement, dispatch on iterator tag type. For forward
    // iterators we can use std::distance to preallocate the space required
    // and only do one copy.
    ClearRetainCapacity();
    for (; first != last; ++first)
      emplace_back(*first);
    IncrementGeneration();
  }
  void assign(std::initializer_list<value_type> value) {
    reserve(std::distance(value.begin(), value.end()));
    assign(value.begin(), value.end());
  }

  // ---------------------------------------------------------------------------
  // Accessors.
  //
  // Since this class assumes no exceptions, at() and operator[] are equivalent.

  const value_type& at(size_type i) const {
    DCHECK(i < size());
    size_t right_size = buffer_.capacity() - begin_;
    if (begin_ <= end_ || i < right_size)
      return buffer_[begin_ + i];
    return buffer_[i - right_size];
  }
  value_type& at(size_type i) {
    return const_cast<value_type&>(base::as_const(*this).at(i));
  }

  value_type& operator[](size_type i) {
    return const_cast<value_type&>(base::as_const(*this)[i]);
  }
  const value_type& operator[](size_type i) const { return at(i); }

  value_type& front() {
    DCHECK(!empty());
    return buffer_[begin_];
  }
  const value_type& front() const {
    DCHECK(!empty());
    return buffer_[begin_];
  }

  value_type& back() {
    DCHECK(!empty());
    return *(--end());
  }
  const value_type& back() const {
    DCHECK(!empty());
    return *(--end());
  }

  // ---------------------------------------------------------------------------
  // Iterators.

  iterator begin() { return iterator(this, begin_); }
  const_iterator begin() const { return const_iterator(this, begin_); }
  const_iterator cbegin() const { return const_iterator(this, begin_); }

  iterator end() { return iterator(this, end_); }
  const_iterator end() const { return const_iterator(this, end_); }
  const_iterator cend() const { return const_iterator(this, end_); }

  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  const_reverse_iterator crbegin() const { return rbegin(); }

  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }
  const_reverse_iterator crend() const { return rend(); }

  // ---------------------------------------------------------------------------
  // Memory management.

  // IMPORTANT NOTE ON reserve(...): This class implements auto-shrinking of
  // the buffer when elements are deleted and there is "too much" wasted space.
  // So if you call reserve() with a large size in anticipation of pushing many
  // elements, but pop an element before the queue is full, the capacity you
  // reserved may be lost.
  //
  // As a result, it's only worthwhile to call reserve() when you're adding
  // many things at once with no intermediate operations.
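  //
  // For example (illustrative; |items| is a hypothetical source container),
  // reserve() pays off for an uninterrupted bulk append:
  //
  //   base::circular_deque<int> d;
  //   d.reserve(items.size());
  //   for (int item : items)
  //     d.push_back(item);  // No pops in between, so the capacity is kept.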
  void reserve(size_type new_capacity) {
    if (new_capacity > capacity())
      SetCapacityTo(new_capacity);
  }

  size_type capacity() const {
    // One item is wasted to indicate end().
    return buffer_.capacity() == 0 ? 0 : buffer_.capacity() - 1;
  }

  void shrink_to_fit() {
    if (empty()) {
      // Optimize empty case to really delete everything if there was
      // something.
      if (buffer_.capacity())
        buffer_ = VectorBuffer();
    } else {
      SetCapacityTo(size());
    }
  }

  // ---------------------------------------------------------------------------
  // Size management.

  // This will additionally reset the capacity() to 0.
  void clear() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    ClearRetainCapacity();
    buffer_ = VectorBuffer();
  }

  bool empty() const { return begin_ == end_; }

  size_type size() const {
    if (begin_ <= end_)
      return end_ - begin_;
    return buffer_.capacity() - begin_ + end_;
  }

  // When reducing size, the elements are deleted from the end. When expanding
  // size, elements are added to the end with |value| or the default
  // constructed version. Even when using resize(count) to shrink, a default
  // constructor is required for the code to compile, even though it will not
  // be called.
  //
  // There are two versions rather than using a default value to avoid
  // creating a temporary when shrinking (when it's not needed). Plus if
  // the default constructor is desired when expanding usually just calling it
  // for each element is faster than making a default-constructed temporary and
  // copying it.
  void resize(size_type count) {
    // SEE BELOW VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      // This could be slightly more efficient but expanding a queue with
      // identical elements is unusual and the extra computations of emplacing
      // one-by-one will typically be small relative to calling the constructor
      // for every item.
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back();
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }
  void resize(size_type count, const value_type& value) {
    // SEE ABOVE VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back(value);
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // Insert and erase.
  //
  // Insertion and deletion in the middle is O(n) and invalidates all existing
  // iterators.
  //
  // The implementation of insert isn't optimized as much as it could be. If
  // the insertion requires that the buffer be grown, it will first be grown
  // and everything moved, and then the items will be inserted, potentially
  // moving some items twice. This simplifies the implementation substantially
  // and means less generated templatized code. Since this is an uncommon
  // operation for deques, and already relatively slow, it doesn't seem worth
  // the benefit to optimize this.

  void insert(const_iterator pos, size_type count, const T& value) {
    ValidateIterator(pos);

    // Optimize insert at the beginning.
    if (pos == begin()) {
      ExpandCapacityIfNecessary(count);
      for (size_t i = 0; i < count; i++)
        push_front(value);
      return;
    }

    iterator insert_cur(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(count, &insert_cur, &insert_end);
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(value);
      ++insert_cur;
    }

    IncrementGeneration();
  }

  // This enable_if keeps this call from getting confused with the (pos, count,
  // value) version when value is an integer.
  template <class InputIterator>
  typename std::enable_if<::base::internal::is_iterator<InputIterator>::value,
                          void>::type
  insert(const_iterator pos, InputIterator first, InputIterator last) {
    ValidateIterator(pos);

    size_t inserted_items = std::distance(first, last);
    if (inserted_items == 0)
      return;  // Can divide by 0 when doing modulo below, so return early.

    // Make a hole to copy the items into.
    iterator insert_cur;
    iterator insert_end;
    if (pos == begin()) {
      // Optimize insert at the beginning, nothing needs to be shifted and the
      // hole is the |inserted_items| block immediately before |begin_|.
      ExpandCapacityIfNecessary(inserted_items);
      insert_end = begin();
      begin_ =
          (begin_ + buffer_.capacity() - inserted_items) % buffer_.capacity();
      insert_cur = begin();
    } else {
      insert_cur = iterator(this, pos.index_);
      MakeRoomFor(inserted_items, &insert_cur, &insert_end);
    }

    // Copy the items.
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(*first);
      ++insert_cur;
      ++first;
    }

    IncrementGeneration();
  }

  // These all return an iterator to the inserted item. Existing iterators will
  // be invalidated.
  iterator insert(const_iterator pos, const T& value) {
    return emplace(pos, value);
  }
  iterator insert(const_iterator pos, T&& value) {
    return emplace(pos, std::move(value));
  }
  template <class... Args>
  iterator emplace(const_iterator pos, Args&&... args) {
    ValidateIterator(pos);

    // Optimize insert at beginning which doesn't require shifting.
    if (pos == cbegin()) {
      emplace_front(std::forward<Args>(args)...);
      return begin();
    }

    // Do this before we make the new iterators we return.
    IncrementGeneration();

    iterator insert_begin(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(1, &insert_begin, &insert_end);
    new (&buffer_[insert_begin.index_]) T(std::forward<Args>(args)...);

    return insert_begin;
  }

  // Calling erase() won't automatically resize the buffer smaller like resize
  // or the pop functions. Erase is slow and relatively uncommon, and for
  // normal deque usage a pop will normally be done on a regular basis that
  // will prevent excessive buffer usage over long periods of time. It's not
  // worth having the extra code for every template instantiation of erase()
  // to resize capacity downward to a new buffer.
  iterator erase(const_iterator pos) { return erase(pos, pos + 1); }
  iterator erase(const_iterator first, const_iterator last) {
    ValidateIterator(first);
    ValidateIterator(last);

    IncrementGeneration();

    // First, call the destructor on the deleted items.
    if (first.index_ == last.index_) {
      // Nothing deleted. Need to return early to avoid falling through to
      // moving items on top of themselves.
      return iterator(this, first.index_);
    } else if (first.index_ < last.index_) {
      // Contiguous range.
      buffer_.DestructRange(&buffer_[first.index_], &buffer_[last.index_]);
    } else {
      // Deleted range wraps around.
      buffer_.DestructRange(&buffer_[first.index_],
                            &buffer_[buffer_.capacity()]);
      buffer_.DestructRange(&buffer_[0], &buffer_[last.index_]);
    }

    if (first.index_ == begin_) {
      // This deletion is from the beginning. Nothing needs to be copied, only
      // begin_ needs to be updated.
      begin_ = last.index_;
      return iterator(this, last.index_);
    }

    // In an erase operation, the shifted items all move logically to the left,
    // so move them from left-to-right.
    iterator move_src(this, last.index_);
    iterator move_src_end = end();
    iterator move_dest(this, first.index_);
    for (; move_src < move_src_end; move_src++, move_dest++) {
      buffer_.MoveRange(&buffer_[move_src.index_],
                        &buffer_[move_src.index_ + 1],
                        &buffer_[move_dest.index_]);
    }

    end_ = move_dest.index_;

    // Since we did not reallocate and only changed things after the erase
    // element(s), the input iterator's index points to the thing following the
    // deletion.
    return iterator(this, first.index_);
  }

  // ---------------------------------------------------------------------------
  // Begin/end operations.

  void push_front(const T& value) { emplace_front(value); }
  void push_front(T&& value) { emplace_front(std::move(value)); }

  void push_back(const T& value) { emplace_back(value); }
  void push_back(T&& value) { emplace_back(std::move(value)); }

  template <class... Args>
  reference emplace_front(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    if (begin_ == 0)
      begin_ = buffer_.capacity() - 1;
    else
      begin_--;
    IncrementGeneration();
    new (&buffer_[begin_]) T(std::forward<Args>(args)...);
    return front();
  }

  template <class... Args>
  reference emplace_back(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    new (&buffer_[end_]) T(std::forward<Args>(args)...);
    if (end_ == buffer_.capacity() - 1)
      end_ = 0;
    else
      end_++;
    IncrementGeneration();
    return back();
  }

  void pop_front() {
    DCHECK(size());
    buffer_.DestructRange(&buffer_[begin_], &buffer_[begin_ + 1]);
    begin_++;
    if (begin_ == buffer_.capacity())
      begin_ = 0;

    ShrinkCapacityIfNecessary();

    // Technically popping will not invalidate any iterators since the
    // underlying buffer will be stable. But in the future we may want to add a
    // feature that resizes the buffer smaller if there is too much wasted
    // space. This ensures we can make such a change safely.
    IncrementGeneration();
  }
  void pop_back() {
    DCHECK(size());
    if (end_ == 0)
      end_ = buffer_.capacity() - 1;
    else
      end_--;
    buffer_.DestructRange(&buffer_[end_], &buffer_[end_ + 1]);

    ShrinkCapacityIfNecessary();

    // See pop_front comment about why this is here.
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // General operations.

  void swap(circular_deque& other) {
    std::swap(buffer_, other.buffer_);
    std::swap(begin_, other.begin_);
    std::swap(end_, other.end_);
    IncrementGeneration();
  }

  friend void swap(circular_deque& lhs, circular_deque& rhs) { lhs.swap(rhs); }

 private:
  friend internal::circular_deque_iterator<T>;
  friend internal::circular_deque_const_iterator<T>;

  // Moves the items in the given circular buffer to the current one. The
  // source is moved from so will become invalid. The destination buffer must
  // have already been allocated with enough size.
  static void MoveBuffer(VectorBuffer& from_buf,
                         size_t from_begin,
                         size_t from_end,
                         VectorBuffer* to_buf,
                         size_t* to_begin,
                         size_t* to_end) {
    size_t from_capacity = from_buf.capacity();

    *to_begin = 0;
    if (from_begin < from_end) {
      // Contiguous.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_end],
                         to_buf->begin());
      *to_end = from_end - from_begin;
    } else if (from_begin > from_end) {
      // Discontiguous, copy the right side to the beginning of the new buffer.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_capacity],
                         to_buf->begin());
      size_t right_size = from_capacity - from_begin;
      // Append the left side.
      from_buf.MoveRange(&from_buf[0], &from_buf[from_end],
                         &(*to_buf)[right_size]);
      *to_end = right_size + from_end;
    } else {
      // No items.
      *to_end = 0;
    }
  }

  // Expands the buffer size. This assumes the size is larger than the
  // number of elements in the vector (it won't call delete on anything).
  void SetCapacityTo(size_t new_capacity) {
    // Use the capacity + 1 as the internal buffer size to differentiate
    // empty and full (see definition of buffer_ below).
    VectorBuffer new_buffer(new_capacity + 1);
    MoveBuffer(buffer_, begin_, end_, &new_buffer, &begin_, &end_);
    buffer_ = std::move(new_buffer);
  }
  void ExpandCapacityIfNecessary(size_t additional_elts) {
    size_t min_new_capacity = size() + additional_elts;
    if (capacity() >= min_new_capacity)
      return;  // Already enough room.

    min_new_capacity =
        std::max(min_new_capacity, internal::kCircularBufferInitialCapacity);

    // std::vector always grows by at least 50%. WTF::Deque grows by at least
    // 25%. We expect queue workloads to generally stay at a similar size and
    // grow less than a vector might, so use 25%.
    size_t new_capacity =
        std::max(min_new_capacity, capacity() + capacity() / 4);
    SetCapacityTo(new_capacity);
  }

  void ShrinkCapacityIfNecessary() {
    // Don't auto-shrink below this size.
    if (capacity() <= internal::kCircularBufferInitialCapacity)
      return;

    // Shrink when 100% of the size() is wasted.
    size_t sz = size();
    size_t empty_spaces = capacity() - sz;
    if (empty_spaces < sz)
      return;

    // Leave 1/4 the size as free capacity, not going below the initial
    // capacity.
    size_t new_capacity =
        std::max(internal::kCircularBufferInitialCapacity, sz + sz / 4);
    if (new_capacity < capacity()) {
      // Count extra item to convert to internal capacity.
      SetCapacityTo(new_capacity);
    }
  }

  // Backend for clear() but does not resize the internal buffer.
  void ClearRetainCapacity() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    DestructRange(begin_, end_);
    begin_ = 0;
    end_ = 0;
    IncrementGeneration();
  }

  // Calls destructors for the given begin->end indices. The indices may wrap
  // around. The buffer is not resized, and the begin_ and end_ members are
  // not changed.
  void DestructRange(size_t begin, size_t end) {
    if (end == begin) {
      return;
    } else if (end > begin) {
      buffer_.DestructRange(&buffer_[begin], &buffer_[end]);
    } else {
      buffer_.DestructRange(&buffer_[begin], &buffer_[buffer_.capacity()]);
      buffer_.DestructRange(&buffer_[0], &buffer_[end]);
    }
  }

  // Makes room for |count| items starting at |*insert_begin|. Since iterators
  // are not stable across buffer resizes, |*insert_begin| will be updated to
  // point to the beginning of the newly opened position in the new array
  // (it's in/out), and |*insert_end| will be set to the end of the newly
  // opened position (it's out-only).
  void MakeRoomFor(size_t count, iterator* insert_begin, iterator* insert_end) {
    if (count == 0) {
      *insert_end = *insert_begin;
      return;
    }

    // The offset from the beginning will be stable across reallocations.
    size_t begin_offset = insert_begin->OffsetFromBegin();
    ExpandCapacityIfNecessary(count);

    insert_begin->index_ = (begin_ + begin_offset) % buffer_.capacity();
    *insert_end =
        iterator(this, (insert_begin->index_ + count) % buffer_.capacity());

    // Update the new end and prepare the iterators for copying.
    iterator src = end();
    end_ = (end_ + count) % buffer_.capacity();
    iterator dest = end();

    // Move the elements. This will always involve shifting logically to the
    // right, so move in a right-to-left order.
    while (true) {
      if (src == *insert_begin)
        break;
      --src;
      --dest;
      buffer_.MoveRange(&buffer_[src.index_], &buffer_[src.index_ + 1],
                        &buffer_[dest.index_]);
    }
  }

#if DCHECK_IS_ON()
  // Asserts the given index is dereferenceable. The index is an index into the
  // buffer, not an index used by operator[] or at() which will be offsets from
  // begin.
  void CheckValidIndex(size_t i) const {
    if (begin_ <= end_)
      DCHECK(i >= begin_ && i < end_);
    else
      DCHECK((i >= begin_ && i < buffer_.capacity()) || i < end_);
  }

  // Asserts the given index is either dereferenceable or points to end().
  void CheckValidIndexOrEnd(size_t i) const {
    if (i != end_)
      CheckValidIndex(i);
  }

  void ValidateIterator(const const_iterator& i) const {
    DCHECK(i.parent_deque_ == this);
    i.CheckUnstableUsage();
  }

  // See generation_ below.
  void IncrementGeneration() { generation_++; }
#else
  // No-op versions of these functions for release builds.
  void CheckValidIndex(size_t) const {}
  void CheckValidIndexOrEnd(size_t) const {}
  void ValidateIterator(const const_iterator& i) const {}
  void IncrementGeneration() {}
#endif

  // Danger, the buffer_.capacity() is the "internal capacity" which is
  // capacity() + 1 since there is an extra item to indicate the end. Otherwise
  // being completely empty and completely full are indistinguishable (begin ==
  // end). We could add a separate flag to avoid it, but that adds significant
  // extra complexity since every computation will have to check for it. Always
  // keeping one extra unused element in the buffer makes iterator computations
  // much simpler.
  //
  // Container internal code will want to use buffer_.capacity() for offset
  // computations rather than capacity().
  VectorBuffer buffer_;
  size_type begin_ = 0;
  size_type end_ = 0;

#if DCHECK_IS_ON()
  // Incremented every time a modification is made that could affect iterator
  // invalidations.
  uint64_t generation_ = 0;
#endif
};

// Implementations of base::Erase[If] (see base/stl_util.h).
template <class T, class Value>
size_t Erase(circular_deque<T>& container, const Value& value) {
  auto it = std::remove(container.begin(), container.end(), value);
  size_t removed = std::distance(it, container.end());
  container.erase(it, container.end());
  return removed;
}

template <class T, class Predicate>
size_t EraseIf(circular_deque<T>& container, Predicate pred) {
  auto it = std::remove_if(container.begin(), container.end(), pred);
  size_t removed = std::distance(it, container.end());
  container.erase(it, container.end());
  return removed;
}
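
// Example (illustrative):
//
//   base::circular_deque<int> d = {1, 2, 3, 4};
//   base::EraseIf(d, [](int x) { return x % 2 == 0; });  // d is now {1, 3}.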

}  // namespace base

#endif  // BASE_CONTAINERS_CIRCULAR_DEQUE_H_