// unlimited_storage.hpp
// Copyright 2015-2019 Hans Dembinski
// Copyright 2019 Glen Joseph Fernandes (glenjofe@gmail.com)
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_HISTOGRAM_UNLIMITED_STORAGE_HPP
#define BOOST_HISTOGRAM_UNLIMITED_STORAGE_HPP

#include <algorithm>
#include <boost/core/alloc_construct.hpp>
#include <boost/core/exchange.hpp>
#include <boost/core/nvp.hpp>
#include <boost/histogram/detail/array_wrapper.hpp>
#include <boost/histogram/detail/iterator_adaptor.hpp>
#include <boost/histogram/detail/large_int.hpp>
#include <boost/histogram/detail/operators.hpp>
#include <boost/histogram/detail/safe_comparison.hpp>
#include <boost/histogram/fwd.hpp>
#include <boost/mp11/algorithm.hpp>
#include <boost/mp11/list.hpp>
#include <boost/mp11/utility.hpp>
#include <cassert>
#include <cmath>
#include <cstdint>
#include <functional>
#include <iterator>
#include <memory>
#include <type_traits>

namespace boost {
namespace histogram {
namespace detail {

template <class T>
struct is_large_int : std::false_type {};

template <class A>
struct is_large_int<large_int<A>> : std::true_type {};

template <class T, class ReturnType>
using if_arithmetic_or_large_int =
    std::enable_if_t<(std::is_arithmetic<T>::value || is_large_int<T>::value),
                     ReturnType>;

template <class L, class T>
using next_type = mp11::mp_at_c<L, (mp11::mp_find<L, T>::value + 1)>;

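// RAII guard: deallocates the memory obtained from the allocator if element
// construction throws; release() disarms the guard once construction succeeded.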
template <class Allocator>
class construct_guard {
public:
  using pointer = typename std::allocator_traits<Allocator>::pointer;

  construct_guard(Allocator& a, pointer p, std::size_t n) noexcept
      : a_(a), p_(p), n_(n) {}

  ~construct_guard() {
    if (p_) { a_.deallocate(p_, n_); }
  }

  void release() { p_ = pointer(); }

  construct_guard(const construct_guard&) = delete;
  construct_guard& operator=(const construct_guard&) = delete;

private:
  Allocator& a_;
  pointer p_;
  std::size_t n_;
};

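// The buffer_create overloads allocate n elements and construct them, either
// value-initialized or copied from an iterator; the construct_guard frees the
// allocation again if a constructor throws.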
template <class Allocator>
void* buffer_create(Allocator& a, std::size_t n) {
  auto ptr = a.allocate(n); // may throw
  static_assert(std::is_trivially_copyable<decltype(ptr)>::value,
                "ptr must be trivially copyable");
  construct_guard<Allocator> guard(a, ptr, n);
  boost::alloc_construct_n(a, ptr, n);
  guard.release();
  return static_cast<void*>(ptr);
}

template <class Allocator, class Iterator>
auto buffer_create(Allocator& a, std::size_t n, Iterator iter) {
  assert(n > 0u);
  auto ptr = a.allocate(n); // may throw
  static_assert(std::is_trivially_copyable<decltype(ptr)>::value,
                "ptr must be trivially copyable");
  construct_guard<Allocator> guard(a, ptr, n);
  using T = typename std::allocator_traits<Allocator>::value_type;
  struct casting_iterator {
    void operator++() noexcept { ++iter_; }
    T operator*() noexcept {
      return static_cast<T>(*iter_);
    } // silence conversion warnings
    Iterator iter_;
  };
  boost::alloc_construct_n(a, ptr, n, casting_iterator{iter});
  guard.release();
  return ptr;
}

template <class Allocator>
void buffer_destroy(Allocator& a, typename std::allocator_traits<Allocator>::pointer p,
                    std::size_t n) {
  assert(p);
  assert(n > 0u);
  boost::alloc_destroy_n(a, p, n);
  a.deallocate(p, n);
}

} // namespace detail

/**
  Memory-efficient storage for integral counters which cannot overflow.

  This storage provides a no-overflow-guarantee if the counters are incremented with
  integer weights. It maintains a contiguous array of elemental counters, one for each
  cell. If an operation is requested which would overflow a counter, the array is
  replaced with another of a wider integral type, then the operation is executed. The
  storage uses integers of 8, 16, 32, 64 bits, and then switches to a multiprecision
  integral type, similar to those in
  [Boost.Multiprecision](https://www.boost.org/doc/libs/develop/libs/multiprecision/doc/html/index.html).

  A scaling operation or adding a floating point number triggers a conversion of the
  elemental counters into doubles, which voids the no-overflow-guarantee.
*/
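// Minimal usage sketch (illustrative only, not part of this header; assumes the
// default allocator declared in fwd.hpp):
//
//   boost::histogram::unlimited_storage<> s;
//   s.reset(4);                             // 4 cells, stored as uint8_t at first
//   for (int i = 0; i < 1000; ++i) ++s[0];  // overflow promotes the array to uint16_t
//   s[1] += 2.5;                            // non-integral addend converts all cells to double
//   double v = s[0];                        // reads always convert to double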
template <class Allocator>
class unlimited_storage {
  static_assert(
      std::is_same<typename std::allocator_traits<Allocator>::pointer,
                   typename std::allocator_traits<Allocator>::value_type*>::value,
      "unlimited_storage requires allocator with trivial pointer type");
  using U8 = std::uint8_t;
  using U16 = std::uint16_t;
  using U32 = std::uint32_t;
  using U64 = std::uint64_t;

public:
  static constexpr bool has_threading_support = false;

  using allocator_type = Allocator;
  using value_type = double;
  using large_int = detail::large_int<
      typename std::allocator_traits<allocator_type>::template rebind_alloc<U64>>;

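  // Type-erased buffer: holds a contiguous array of one of the cell types listed
  // in `types`; `type` is the index of the currently active type and visit()
  // dispatches on it.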
  struct buffer_type {
    // cannot be moved outside of scope of unlimited_storage, large_int is dependent type
    using types = mp11::mp_list<U8, U16, U32, U64, large_int, double>;

    template <class T>
    static constexpr unsigned type_index() noexcept {
      return static_cast<unsigned>(mp11::mp_find<types, T>::value);
    }

    template <class F, class... Ts>
    decltype(auto) visit(F&& f, Ts&&... ts) const {
      // this is intentionally not a switch, the if-chain is faster in benchmarks
      if (type == type_index<U8>())
        return f(static_cast<U8*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U16>())
        return f(static_cast<U16*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U32>())
        return f(static_cast<U32*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U64>())
        return f(static_cast<U64*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<large_int>())
        return f(static_cast<large_int*>(ptr), std::forward<Ts>(ts)...);
      return f(static_cast<double*>(ptr), std::forward<Ts>(ts)...);
    }

    buffer_type(const allocator_type& a = {}) : alloc(a) {}

    buffer_type(buffer_type&& o) noexcept
        : alloc(std::move(o.alloc))
        , size(boost::exchange(o.size, 0))
        , type(boost::exchange(o.type, 0))
        , ptr(boost::exchange(o.ptr, nullptr)) {}

    buffer_type& operator=(buffer_type&& o) noexcept {
      using std::swap;
      swap(alloc, o.alloc);
      swap(size, o.size);
      swap(type, o.type);
      swap(ptr, o.ptr);
      return *this;
    }

    buffer_type(const buffer_type& x) : alloc(x.alloc) {
      x.visit([this, n = x.size](const auto* xp) {
        using T = std::decay_t<decltype(*xp)>;
        this->template make<T>(n, xp);
      });
    }

    buffer_type& operator=(const buffer_type& o) {
      *this = buffer_type(o);
      return *this;
    }

    ~buffer_type() noexcept { destroy(); }

    void destroy() noexcept {
      assert((ptr == nullptr) == (size == 0));
      if (ptr == nullptr) return;
      visit([this](auto* p) {
        using T = std::decay_t<decltype(*p)>;
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc); // rebind allocator
        detail::buffer_destroy(a, p, this->size);
      });
      size = 0;
      type = 0;
      ptr = nullptr;
    }

    template <class T>
    void make(std::size_t n) {
      // note: order of commands is to not leave buffer in invalid state upon throw
      destroy();
      if (n > 0) {
        // rebind allocator
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc);
        ptr = detail::buffer_create(a, n); // may throw
      }
      size = n;
      type = type_index<T>();
    }

    template <class T, class U>
    void make(std::size_t n, U iter) {
      // note: iter may be current ptr, so create new buffer before deleting old buffer
      void* new_ptr = nullptr;
      const auto new_type = type_index<T>();
      if (n > 0) {
        // rebind allocator
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc);
        new_ptr = detail::buffer_create(a, n, iter); // may throw
      }
      destroy();
      size = n;
      type = new_type;
      ptr = new_ptr;
    }

    allocator_type alloc;
    std::size_t size = 0;
    unsigned type = 0;
    mutable void* ptr = nullptr;
  };

  class reference; // forward declare to make friend of const_reference

  /// implementation detail
  class const_reference
      : detail::partially_ordered<const_reference, const_reference, void> {
  public:
    const_reference(buffer_type& b, std::size_t i) noexcept : bref_(b), idx_(i) {
      assert(idx_ < bref_.size);
    }

    const_reference(const const_reference&) noexcept = default;

    // no assignment for const_references
    const_reference& operator=(const const_reference&) = delete;
    const_reference& operator=(const_reference&&) = delete;

    operator double() const noexcept {
      return bref_.visit(
          [this](const auto* p) { return static_cast<double>(p[this->idx_]); });
    }

    bool operator<(const const_reference& o) const noexcept {
      return apply_binary<detail::safe_less>(o);
    }

    bool operator==(const const_reference& o) const noexcept {
      return apply_binary<detail::safe_equal>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator<(const U& o) const noexcept {
      return apply_binary<detail::safe_less>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator>(const U& o) const noexcept {
      return apply_binary<detail::safe_greater>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator==(const U& o) const noexcept {
      return apply_binary<detail::safe_equal>(o);
    }

  private:
    template <class Binary>
    bool apply_binary(const const_reference& x) const noexcept {
      return x.bref_.visit([this, ix = x.idx_](const auto* xp) {
        return this->apply_binary<Binary>(xp[ix]);
      });
    }

    template <class Binary, class U>
    bool apply_binary(const U& x) const noexcept {
      return bref_.visit([i = idx_, &x](const auto* p) { return Binary()(p[i], x); });
    }

  protected:
    buffer_type& bref_;
    std::size_t idx_;
    friend class reference;
  };

  /// implementation detail
  class reference : public const_reference,
                    public detail::partially_ordered<reference, reference, void> {
  public:
    reference(buffer_type& b, std::size_t i) noexcept : const_reference(b, i) {}

    // references do copy-construct
    reference(const reference& x) noexcept = default;

    // references do not rebind, assign through
    reference& operator=(const reference& x) {
      return operator=(static_cast<const_reference>(x));
    }

    // references do not rebind, assign through
    reference& operator=(const const_reference& x) {
      // safe for self-assignment, assigning matching type doesn't invalidate buffer
      x.bref_.visit([this, ix = x.idx_](const auto* xp) { this->operator=(xp[ix]); });
      return *this;
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, reference&> operator=(const U& x) {
      this->bref_.visit([this, &x](auto* p) {
        // gcc-8 optimizes the expression `p[this->idx_] = 0` away even at -O0,
        // so we merge it into the next line which is properly counted
        adder()((p[this->idx_] = 0, p), this->bref_, this->idx_, x);
      });
      return *this;
    }

    bool operator<(const reference& o) const noexcept {
      return const_reference::operator<(o);
    }

    bool operator==(const reference& o) const noexcept {
      return const_reference::operator==(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator<(const U& o) const noexcept {
      return const_reference::operator<(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator>(const U& o) const noexcept {
      return const_reference::operator>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator==(const U& o) const noexcept {
      return const_reference::operator==(o);
    }

    reference& operator+=(const const_reference& x) {
      x.bref_.visit([this, ix = x.idx_](const auto* xp) { this->operator+=(xp[ix]); });
      return *this;
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, reference&> operator+=(const U& x) {
      this->bref_.visit(adder(), this->bref_, this->idx_, x);
      return *this;
    }

    reference& operator-=(const double x) { return operator+=(-x); }

    reference& operator*=(const double x) {
      this->bref_.visit(multiplier(), this->bref_, this->idx_, x);
      return *this;
    }

    reference& operator/=(const double x) { return operator*=(1.0 / x); }

    reference& operator++() {
      this->bref_.visit(incrementor(), this->bref_, this->idx_);
      return *this;
    }
  };

private:
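  // Random-access iterator over the type-erased buffer, built on
  // detail::iterator_adaptor with the cell index as base; dereferencing yields
  // a proxy (const_)reference rather than a plain value.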
  template <class Value, class Reference>
  class iterator_impl : public detail::iterator_adaptor<iterator_impl<Value, Reference>,
                                                        std::size_t, Reference, Value> {
  public:
    iterator_impl() = default;
    template <class V, class R>
    iterator_impl(const iterator_impl<V, R>& it)
        : iterator_impl::iterator_adaptor_(it.base()), buffer_(it.buffer_) {}
    iterator_impl(buffer_type* b, std::size_t i) noexcept
        : iterator_impl::iterator_adaptor_(i), buffer_(b) {}

    Reference operator*() const noexcept { return {*buffer_, this->base()}; }

    template <class V, class R>
    friend class iterator_impl;

  private:
    mutable buffer_type* buffer_ = nullptr;
  };

public:
  using const_iterator = iterator_impl<const value_type, const_reference>;
  using iterator = iterator_impl<value_type, reference>;

  explicit unlimited_storage(const allocator_type& a = {}) : buffer_(a) {}
  unlimited_storage(const unlimited_storage&) = default;
  unlimited_storage& operator=(const unlimited_storage&) = default;
  unlimited_storage(unlimited_storage&&) = default;
  unlimited_storage& operator=(unlimited_storage&&) = default;

  // TODO
  // template <class Allocator>
  // unlimited_storage(const unlimited_storage<Allocator>& s)

  template <class Iterable, class = detail::requires_iterable<Iterable>>
  explicit unlimited_storage(const Iterable& s) {
    using std::begin;
    using std::end;
    auto s_begin = begin(s);
    auto s_end = end(s);
    using V = typename std::iterator_traits<decltype(begin(s))>::value_type;
    // must be non-const to avoid msvc warning about if constexpr
    auto ti = buffer_type::template type_index<V>();
    auto nt = mp11::mp_size<typename buffer_type::types>::value;
    const std::size_t size = static_cast<std::size_t>(std::distance(s_begin, s_end));
    if (ti < nt)
      buffer_.template make<V>(size, s_begin);
    else
      buffer_.template make<double>(size, s_begin);
  }

  template <class Iterable, class = detail::requires_iterable<Iterable>>
  unlimited_storage& operator=(const Iterable& s) {
    *this = unlimited_storage(s);
    return *this;
  }

  allocator_type get_allocator() const { return buffer_.alloc; }

  void reset(std::size_t n) { buffer_.template make<U8>(n); }

  std::size_t size() const noexcept { return buffer_.size; }

  reference operator[](std::size_t i) noexcept { return {buffer_, i}; }
  const_reference operator[](std::size_t i) const noexcept { return {buffer_, i}; }

  bool operator==(const unlimited_storage& x) const noexcept {
    if (size() != x.size()) return false;
    return buffer_.visit([&x](const auto* p) {
      return x.buffer_.visit([p, n = x.size()](const auto* xp) {
        return std::equal(p, p + n, xp, detail::safe_equal{});
      });
    });
  }

  template <class Iterable>
  bool operator==(const Iterable& iterable) const {
    if (size() != iterable.size()) return false;
    return buffer_.visit([&iterable](const auto* p) {
      return std::equal(p, p + iterable.size(), std::begin(iterable),
                        detail::safe_equal{});
    });
  }

  unlimited_storage& operator*=(const double x) {
    buffer_.visit(multiplier(), buffer_, x);
    return *this;
  }

  iterator begin() noexcept { return {&buffer_, 0}; }
  iterator end() noexcept { return {&buffer_, size()}; }
  const_iterator begin() const noexcept { return {&buffer_, 0}; }
  const_iterator end() const noexcept { return {&buffer_, size()}; }

  /// implementation detail; used by unit tests, not part of generic storage interface
  template <class T>
  unlimited_storage(std::size_t s, const T* p, const allocator_type& a = {})
      : buffer_(std::move(a)) {
    buffer_.template make<T>(s, p);
  }

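  // Serialization stores the active type index, the number of cells, and the raw
  // cell values; on loading, a buffer of the stored type and size is re-created
  // before the values are read back.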
  template <class Archive>
  void serialize(Archive& ar, unsigned /* version */) {
    if (Archive::is_loading::value) {
      buffer_type tmp(buffer_.alloc);
      std::size_t size;
      ar& make_nvp("type", tmp.type);
      ar& make_nvp("size", size);
      tmp.visit([this, size](auto* tp) {
        assert(tp == nullptr);
        using T = std::decay_t<decltype(*tp)>;
        buffer_.template make<T>(size);
      });
    } else {
      ar& make_nvp("type", buffer_.type);
      ar& make_nvp("size", buffer_.size);
    }
    buffer_.visit([this, &ar](auto* tp) {
      auto w = detail::make_array_wrapper(tp, this->buffer_.size);
      ar& make_nvp("buffer", w);
    });
  }

private:
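  // Visitor functors applied through buffer_type::visit; each one re-creates the
  // buffer with the next wider cell type (or with double) when the requested
  // operation would overflow the current type or cannot be represented by it.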
  struct incrementor {
    template <class T>
    void operator()(T* tp, buffer_type& b, std::size_t i) {
      assert(tp && i < b.size);
      if (!detail::safe_increment(tp[i])) {
        using U = detail::next_type<typename buffer_type::types, T>;
        b.template make<U>(b.size, tp);
        ++static_cast<U*>(b.ptr)[i];
      }
    }
    void operator()(large_int* tp, buffer_type&, std::size_t i) { ++tp[i]; }
    void operator()(double* tp, buffer_type&, std::size_t i) { ++tp[i]; }
  };

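  // adder dispatches on the type of x: unsigned integral values are added with
  // overflow detection and promote the buffer to the next wider type when needed,
  // negative signed values and other non-integral values convert the buffer to
  // double, and large_int arguments convert it to large_int.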
  struct adder {
    template <class U>
    void operator()(double* tp, buffer_type&, std::size_t i, const U& x) {
      tp[i] += static_cast<double>(x);
    }

    void operator()(large_int* tp, buffer_type&, std::size_t i, const large_int& x) {
      tp[i] += x; // potentially adding large_int to itself is safe
    }

    template <class T, class U>
    void operator()(T* tp, buffer_type& b, std::size_t i, const U& x) {
      is_x_integral(std::is_integral<U>{}, tp, b, i, x);
    }

    template <class T, class U>
    void is_x_integral(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const U& x) {
      // x could be reference to buffer we manipulate, make copy before changing buffer
      const auto v = static_cast<double>(x);
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, i, v);
    }

    template <class T>
    void is_x_integral(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const large_int& x) {
      // x could be reference to buffer we manipulate, make copy before changing buffer
      const auto v = static_cast<large_int>(x);
      b.template make<large_int>(b.size, tp);
      operator()(static_cast<large_int*>(b.ptr), b, i, v);
    }

    template <class T, class U>
    void is_x_integral(std::true_type, T* tp, buffer_type& b, std::size_t i, const U& x) {
      is_x_unsigned(std::is_unsigned<U>{}, tp, b, i, x);
    }

    template <class T, class U>
    void is_x_unsigned(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const U& x) {
      if (x >= 0)
        is_x_unsigned(std::true_type{}, tp, b, i, detail::make_unsigned(x));
      else
        is_x_integral(std::false_type{}, tp, b, i, static_cast<double>(x));
    }

    template <class T, class U>
    void is_x_unsigned(std::true_type, T* tp, buffer_type& b, std::size_t i, const U& x) {
      if (detail::safe_radd(tp[i], x)) return;
      // x could be reference to buffer we manipulate, need to convert to value
      const auto y = x;
      using TN = detail::next_type<typename buffer_type::types, T>;
      b.template make<TN>(b.size, tp);
      is_x_unsigned(std::true_type{}, static_cast<TN*>(b.ptr), b, i, y);
    }

    template <class U>
    void is_x_unsigned(std::true_type, large_int* tp, buffer_type&, std::size_t i,
                       const U& x) {
      tp[i] += x;
    }
  };

  struct multiplier {
    template <class T>
    void operator()(T* tp, buffer_type& b, const double x) {
      // potential lossy conversion that cannot be avoided
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, x);
    }

    void operator()(double* tp, buffer_type& b, const double x) {
      for (auto end = tp + b.size; tp != end; ++tp) *tp *= x;
    }

    template <class T>
    void operator()(T* tp, buffer_type& b, std::size_t i, const double x) {
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, i, x);
    }

    void operator()(double* tp, buffer_type&, std::size_t i, const double x) {
      tp[i] *= static_cast<double>(x);
    }
  };

  mutable buffer_type buffer_;
  friend struct unsafe_access;
};

} // namespace histogram
} // namespace boost

#endif