// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_MEMORY_CHECKED_PTR_H_
#define BASE_MEMORY_CHECKED_PTR_H_

#include <stddef.h>
#include <stdint.h>

#include <type_traits>
#include <utility>

#include "base/compiler_specific.h"
#include "base/logging.h"
#include "build/build_config.h"

// TEST: We can't use protection in the real code (yet) because it may lead to
// crashes in absence of PartitionAlloc support. Setting it to 0 will disable
// the protection, while preserving all calculations.
#define CHECKED_PTR2_PROTECTION_ENABLED 0

#define CHECKED_PTR2_USE_NO_OP_WRAPPER 0

// Set it to 1 to avoid branches when checking if per-pointer protection is
// enabled.
#define CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED 0
// Set it to 1 to avoid branches when dereferencing the pointer.
// Must be 1 if the above is 1.
#define CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING 0
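
// The constraint above can be enforced at compile time; a minimal
// illustrative guard (an addition for clarity, mirroring the static_assert
// inside SafelyUnwrapPtrInternal below):
static_assert(!CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED ||
                  CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING,
              "CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING must be 1 if "
              "CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED is 1");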

namespace base {

// NOTE: All methods should be ALWAYS_INLINE. CheckedPtr is meant to be a
// lightweight replacement of a raw pointer, hence performance is critical.

namespace internal {
// These classes/structures are part of the CheckedPtr implementation.
// DO NOT USE THESE CLASSES DIRECTLY YOURSELF.

struct CheckedPtrNoOpImpl {
  // Wraps a pointer, and returns its uintptr_t representation.
  // Use |const volatile| to prevent compiler error. These will be dropped
  // anyway when casting to uintptr_t and brought back upon pointer extraction.
  static ALWAYS_INLINE uintptr_t WrapRawPtr(const volatile void* cv_ptr) {
    return reinterpret_cast<uintptr_t>(cv_ptr);
  }

  // Returns equivalent of |WrapRawPtr(nullptr)|. Separated out to make it a
  // constexpr.
  static constexpr ALWAYS_INLINE uintptr_t GetWrappedNullPtr() {
    // This relies on nullptr and 0 being equal in the eyes of
    // reinterpret_cast, which apparently isn't true in all environments.
    return 0;
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function is allowed to crash on nullptr.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForDereference(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function must handle nullptr gracefully.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForExtraction(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Unwraps the pointer's uintptr_t representation, without making an
  // assertion on whether memory was freed or not.
  static ALWAYS_INLINE void* UnsafelyUnwrapPtrForComparison(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Advance the wrapped pointer by |delta| bytes.
  static ALWAYS_INLINE uintptr_t Advance(uintptr_t wrapped_ptr, size_t delta) {
    return wrapped_ptr + delta;
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}
};

#if defined(ARCH_CPU_64_BITS)

constexpr int kValidAddressBits = 48;
constexpr uintptr_t kAddressMask = (1ull << kValidAddressBits) - 1;
constexpr int kGenerationBits = sizeof(uintptr_t) * 8 - kValidAddressBits;
constexpr uintptr_t kGenerationMask = ~kAddressMask;
constexpr int kTopBitShift = 63;
constexpr uintptr_t kTopBit = 1ull << kTopBitShift;
static_assert(kTopBit << 1 == 0, "kTopBit should really be the top bit");
static_assert((kTopBit & kGenerationMask) > 0,
              "kTopBit bit must be inside the generation region");
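
// For concreteness, the resulting values (derived directly from the
// definitions above):
//   kAddressMask    == 0x0000ffffffffffff  (low 48 bits: the address)
//   kGenerationMask == 0xffff000000000000  (top 16 bits: the generation)
//   kGenerationBits == 16
//   kTopBit         == 0x8000000000000000  (protection-enabled marker)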

// TEST: Use volatile so that the read isn't optimized out.
static volatile bool g_enabled = true;

struct CheckedPtr2Impl {
  static_assert(sizeof(uintptr_t) == 8,
                "only 64-bit architectures are supported");

  // Wraps a pointer, and returns its uintptr_t representation.
  static ALWAYS_INLINE uintptr_t WrapRawPtr(const volatile void* cv_ptr) {
    void* ptr = const_cast<void*>(cv_ptr);
    uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
#if CHECKED_PTR2_USE_NO_OP_WRAPPER
    static_assert(!CHECKED_PTR2_PROTECTION_ENABLED, "");
#else
    // Make sure that the address bits that will be used for generation are 0.
    // Otherwise the logic may fail.
    DCHECK_EQ(ExtractGeneration(addr), 0ull);

    // TEST: |g_enabled| should be replaced with a check if the allocation is
    // on PartitionAlloc. There could be also a Finch check added.
    if (ptr == nullptr || !g_enabled) {
      return addr;
    }

    // TEST: It should be |size = base::PartitionAllocGetSize(ptr)|, however
    // |PartitionAllocGetSize()| will likely crash if used on a non-PA pointer.
    // For now, replacing it with something that always passes.
    //
    // TEST: There shouldn't be |volatile|; that's to prevent optimization of %.
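    // Note: |(addr & (addr - 1)) ^ addr| isolates the lowest set bit of
    // |addr|. That bit always divides |addr|, so the |%| check below never
    // fails; it merely mimics the cost of a real size/alignment check.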
    volatile size_t size = (addr & (addr - 1)) ^ addr;
    if (addr % size != 0) {
      DCHECK(false);
      return addr;
    }

    // Read the generation from 16 bits before the allocation. Then place it
    // in the top bits of the address.
    //
    // TODO(bartekn): Consider if casting to |volatile*| is needed. I
    // believe it's needed when dereferencing, not sure about here.
    static_assert(sizeof(uint16_t) * 8 == kGenerationBits, "");
#if CHECKED_PTR2_PROTECTION_ENABLED
    uintptr_t generation = *(static_cast<volatile uint16_t*>(ptr) - 1);
#else
    // TEST: Reading from offset -1 may crash without PA support.
    // Just read from offset 0 to attain the same perf characteristics as the
    // expected production solution.
    // This generation will be ignored anyway either when unwrapping or below
    // (depending on the algorithm variant), on the
    // !CHECKED_PTR2_PROTECTION_ENABLED path.
    uintptr_t generation = *(static_cast<volatile uint16_t*>(ptr));
#endif  // #else CHECKED_PTR2_PROTECTION_ENABLED
    generation <<= kValidAddressBits;
    addr |= generation;

#if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    // Always set the top bit to 1, to indicate that the protection is
    // enabled.
    addr |= kTopBit;
#if !CHECKED_PTR2_PROTECTION_ENABLED
    // TEST: Clear the generation, or else it could crash without PA support.
    // If the top bit was set, the unwrapper would read from before the
    // address, but with it cleared, it'll read from the address itself.
    addr &= kAddressMask;
#endif  // #if !CHECKED_PTR2_PROTECTION_ENABLED
#endif  // #if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
#endif  // #if CHECKED_PTR2_USE_NO_OP_WRAPPER
    return addr;
  }
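
  // Example (a sketch, matching Ex.1 in the unwrapping code below): for
  // ptr=0x0000000012345678 whose preceding 16-bit generation is 0x0442,
  // |WrapRawPtr| returns 0x0442000012345678; with
  // CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED it also sets the top bit,
  // yielding 0x8442000012345678.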

  // Returns equivalent of |WrapRawPtr(nullptr)|. Separated out to make it a
  // constexpr.
  static constexpr ALWAYS_INLINE uintptr_t GetWrappedNullPtr() {
    return kWrappedNullPtr;
  }

  static ALWAYS_INLINE uintptr_t
  SafelyUnwrapPtrInternal(uintptr_t wrapped_ptr) {
#if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    // The top bit tells if the protection is enabled. Use it to decide
    // whether to read the word before the allocation, which exists only if
    // the protection is enabled. Otherwise it may crash, in which case read
    // the data from the beginning of the allocation instead and ignore it
    // later. All this magic is to avoid a branch, for performance reasons.
    //
    // A couple of examples, assuming a 64-bit system (continued below):
    // Ex.1: wrapped_ptr=0x8442000012345678
    //       => enabled=0x8000000000000000
    //       => offset=1
    // Ex.2: wrapped_ptr=0x0000000012345678
    //       => enabled=0x0000000000000000
    //       => offset=0
    uintptr_t enabled = wrapped_ptr & kTopBit;
    // We can't have protection disabled and generation set at the same time.
    DCHECK(!(enabled == 0 && (ExtractGeneration(wrapped_ptr)) != 0));
    uintptr_t offset = enabled >> kTopBitShift;  // 0 or 1

    // Use offset to decide if the generation should be read at the beginning
    // or before the allocation.
    // TODO(bartekn): Do something about 1-byte allocations. Reading 2-byte
    // generation at the allocation could crash. This case is executed
    // specifically for non-PartitionAlloc pointers, so we can't make
    // assumptions about alignment.
    //
    // Cast to volatile to ensure memory is read. E.g. in a tight loop, the
    // compiler could cache the value in a register and thus could miss that
    // another thread freed memory and cleared generation.
    //
    // Examples (continued):
    // Ex.1: generation_ptr=0x0000000012345676
    //       a) if pointee wasn't freed, read e.g. generation=0x0442 (could be
    //          also 0x8442, the top bit is overwritten later)
    //       b) if pointee was freed, read e.g. generation=0x1234 (could be
    //          anything)
    // Ex.2: generation_ptr=0x0000000012345678, read e.g. 0x2345 (doesn't
    //       matter what we read, as long as this read doesn't crash)
    volatile uint16_t* generation_ptr =
        reinterpret_cast<volatile uint16_t*>(ExtractAddress(wrapped_ptr)) -
        offset;
    uintptr_t generation = *generation_ptr;

    // Shift generation into the right place and add back the enabled bit.
    //
    // Examples (continued):
    // Ex.1:
    //       a) generation=0x8442000000000000
    //       b) generation=0x9234000000000000
    // Ex.2: generation=0x2345000000000000
    generation <<= kValidAddressBits;
    generation |= enabled;

    // If the protection isn't enabled, clear the top bits. Casting to a
    // signed type makes >> sign extend the last bit.
    //
    // Examples (continued):
    // Ex.1: mask=0xffff000000000000
    //       a) generation=0x8442000000000000
    //       b) generation=0x9234000000000000
    // Ex.2: mask=0x0000000000000000 => generation=0x0000000000000000
    uintptr_t mask = static_cast<intptr_t>(enabled) >> (kGenerationBits - 1);
    generation &= mask;

    // Use hardware to detect generation mismatch. The CPU will crash if the
    // top bits aren't all 0 (technically it won't if all bits are 1, but
    // that's a kernel mode address, which isn't allowed either... also, the
    // top bit will be always zeroed out).
    //
    // Examples (continued):
    // Ex.1:
    //       a) returning 0x0000000012345678
    //       b) returning 0x1676000012345678 (this will generate a desired
    //          crash)
    // Ex.2: returning 0x0000000012345678
    static_assert(CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING, "");
    return generation ^ wrapped_ptr;
#else   // #if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    uintptr_t ptr_generation = wrapped_ptr >> kValidAddressBits;
    if (ptr_generation > 0) {
      // Read generation from before the allocation.
      //
      // Cast to volatile to ensure memory is read. E.g. in a tight loop, the
      // compiler could cache the value in a register and thus could miss that
      // another thread freed memory and cleared generation.
#if CHECKED_PTR2_PROTECTION_ENABLED
      uintptr_t read_generation =
          *(reinterpret_cast<volatile uint16_t*>(ExtractAddress(wrapped_ptr)) -
            1);
#else
      // TEST: Reading from before the pointer may crash. See more above...
      uintptr_t read_generation =
          *(reinterpret_cast<volatile uint16_t*>(ExtractAddress(wrapped_ptr)));
#endif
#if CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING
      // Use hardware to detect generation mismatch. The CPU will crash if the
      // top bits aren't all 0 (technically it won't if all bits are 1, but
      // that's a kernel mode address, which isn't allowed either).
      read_generation <<= kValidAddressBits;
      return read_generation ^ wrapped_ptr;
#else
#if CHECKED_PTR2_PROTECTION_ENABLED
      if (UNLIKELY(ptr_generation != read_generation))
        IMMEDIATE_CRASH();
#else
      // TEST: Use volatile to prevent optimizing out the calculations leading
      // to this point.
      volatile bool x = false;
      if (ptr_generation != read_generation)
        x = true;
#endif  // #else CHECKED_PTR2_PROTECTION_ENABLED
      return wrapped_ptr & kAddressMask;
#endif  // #else CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING
    }
    return wrapped_ptr;
#endif  // #else CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
  }
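
  // To spell out the XOR trick above (a sketch reusing Ex.1): on a match,
  //   0x8442000000000000 ^ 0x8442000012345678 == 0x0000000012345678,
  // i.e. the generation bits cancel out and a canonical address remains; on
  // a mismatch,
  //   0x9234000000000000 ^ 0x8442000012345678 == 0x1676000012345678,
  // a non-canonical address whose dereference faults in hardware.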

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function is allowed to crash on nullptr.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForDereference(
      uintptr_t wrapped_ptr) {
#if CHECKED_PTR2_PROTECTION_ENABLED
    return reinterpret_cast<void*>(SafelyUnwrapPtrInternal(wrapped_ptr));
#else
    // TEST: Use volatile to prevent optimizing out the calculations leading
    // to this point.
    // |SafelyUnwrapPtrInternal| was separated out solely for this purpose.
    volatile uintptr_t addr = SafelyUnwrapPtrInternal(wrapped_ptr);
    return reinterpret_cast<void*>(addr);
#endif
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function must handle nullptr gracefully.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForExtraction(
      uintptr_t wrapped_ptr) {
#if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    // In this implementation SafelyUnwrapPtrForDereference doesn't tolerate
    // nullptr, because it reads unconditionally to avoid branches. Handle the
    // nullptr case here.
    if (wrapped_ptr == kWrappedNullPtr)
      return nullptr;
    return reinterpret_cast<void*>(SafelyUnwrapPtrForDereference(wrapped_ptr));
#else
    // In this implementation SafelyUnwrapPtrForDereference handles the
    // nullptr case well.
    return reinterpret_cast<void*>(SafelyUnwrapPtrForDereference(wrapped_ptr));
#endif
  }

  // Unwraps the pointer's uintptr_t representation, without making an
  // assertion on whether memory was freed or not.
  static ALWAYS_INLINE void* UnsafelyUnwrapPtrForComparison(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(ExtractAddress(wrapped_ptr));
  }

  // Advance the wrapped pointer by |delta| bytes.
  static ALWAYS_INLINE uintptr_t Advance(uintptr_t wrapped_ptr, size_t delta) {
    // Mask out the generation to disable the protection. It's not supported
    // for pointers inside an allocation.
    return ExtractAddress(wrapped_ptr) + delta;
  }
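
  // E.g. (a sketch) |Advance(0x8442000012345678, 4)| returns
  // 0x000000001234567c; once a pointer has been advanced, it's no longer
  // protected against use-after-free.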

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}

 private:
  static ALWAYS_INLINE uintptr_t ExtractAddress(uintptr_t wrapped_ptr) {
    return wrapped_ptr & kAddressMask;
  }

  static ALWAYS_INLINE uintptr_t ExtractGeneration(uintptr_t wrapped_ptr) {
    return wrapped_ptr & kGenerationMask;
  }

  // This relies on nullptr and 0 being equal in the eyes of reinterpret_cast,
  // which apparently isn't true in some rare environments.
  static constexpr uintptr_t kWrappedNullPtr = 0;
};

#endif  // #if defined(ARCH_CPU_64_BITS)

template <typename T>
struct DereferencedPointerType {
  using Type = decltype(*std::declval<T*>());
};
// This explicitly doesn't define any type aliases, since dereferencing void
// is invalid.
template <>
struct DereferencedPointerType<void> {};
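// For example, |DereferencedPointerType<int>::Type| is |int&|, while
// |DereferencedPointerType<void>| has no |Type|, which lets the SFINAE on
// |operator*| below reject T=void.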

}  // namespace internal

// DO NOT USE! EXPERIMENTAL ONLY! This is helpful for local testing!
//
// CheckedPtr is meant to be a pointer wrapper that will crash on
// Use-After-Free (UaF) to prevent security issues. This is very much in the
// experimental phase. More context in:
// https://docs.google.com/document/d/1pnnOAIz_DMWDI4oIOFoMAqLnf_MZ2GsrJNb_dbQ3ZBg
//
// For now, CheckedPtr is a no-op wrapper to aid local testing.
//
// Goals for this API:
// 1. Minimize the amount of caller-side changes as much as physically
//    possible.
// 2. Keep this class as small as possible, while still satisfying goal #1
//    (i.e. we aren't striving to maximize compatibility with raw pointers,
//    merely adding support for cases encountered so far).
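
// Example usage (a sketch; the struct and variable names are illustrative):
//
//   struct MyStruct {
//     CheckedPtr<int> int_ptr;  // used where |int*| would be used
//   };
//
//   int value = 5;
//   MyStruct s;
//   s.int_ptr = &value;     // assigns like a raw pointer
//   *s.int_ptr = 10;        // dereference is UaF-checked
//   int* raw = s.int_ptr;   // implicit extraction back to a raw pointer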
template <typename T,
#if defined(ARCH_CPU_64_BITS)
          typename Impl = internal::CheckedPtr2Impl>
#else
          typename Impl = internal::CheckedPtrNoOpImpl>
#endif
class CheckedPtr {
 public:
  // CheckedPtr can be trivially default constructed (leaving |wrapped_ptr_|
  // uninitialized). This is needed for compatibility with raw pointers.
  //
  // TODO(lukasza): Always initialize |wrapped_ptr_|. Fix resulting build
  // errors. Analyze performance impact.
  constexpr CheckedPtr() noexcept = default;

  // Deliberately implicit, because CheckedPtr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(runtime/explicit)
  constexpr ALWAYS_INLINE CheckedPtr(std::nullptr_t) noexcept
      : wrapped_ptr_(Impl::GetWrappedNullPtr()) {}

  // Deliberately implicit, because CheckedPtr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(runtime/explicit)
  ALWAYS_INLINE CheckedPtr(T* p) noexcept : wrapped_ptr_(Impl::WrapRawPtr(p)) {}

  // In addition to the nullptr_t ctor above, CheckedPtr needs to have these
  // as |=default| or |constexpr| to avoid hitting -Wglobal-constructors in
  // cases like this:
  //   struct SomeStruct { int int_field; CheckedPtr<int> ptr_field; };
  //   SomeStruct g_global_var = { 123, nullptr };
  CheckedPtr(const CheckedPtr&) noexcept = default;
  CheckedPtr(CheckedPtr&&) noexcept = default;
  CheckedPtr& operator=(const CheckedPtr&) noexcept = default;
  CheckedPtr& operator=(CheckedPtr&&) noexcept = default;

  ALWAYS_INLINE CheckedPtr& operator=(T* p) noexcept {
    wrapped_ptr_ = Impl::WrapRawPtr(p);
    return *this;
  }

  ALWAYS_INLINE CheckedPtr& operator=(std::nullptr_t) noexcept {
    wrapped_ptr_ = Impl::GetWrappedNullPtr();
    return *this;
  }

  ~CheckedPtr() = default;

  // Avoid using. The goal of CheckedPtr is to be as close to a raw pointer as
  // possible, so use it only if absolutely necessary (e.g. for const_cast).
  ALWAYS_INLINE T* get() const { return GetForExtraction(); }

  explicit ALWAYS_INLINE operator bool() const {
    return wrapped_ptr_ != Impl::GetWrappedNullPtr();
  }

  // Use SFINAE to avoid defining |operator*| for T=void, which wouldn't
  // compile due to |void&|.
  template <typename U = T,
            typename V = typename internal::DereferencedPointerType<U>::Type>
  ALWAYS_INLINE V& operator*() const {
    return *GetForDereference();
  }

  ALWAYS_INLINE T* operator->() const { return GetForDereference(); }

  // Deliberately implicit, because CheckedPtr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(runtime/explicit)
  ALWAYS_INLINE operator T*() const { return GetForExtraction(); }
  template <typename U>
  explicit ALWAYS_INLINE operator U*() const {
    return static_cast<U*>(GetForExtraction());
  }

  ALWAYS_INLINE CheckedPtr& operator++() {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, sizeof(T));
    return *this;
  }
  ALWAYS_INLINE CheckedPtr& operator--() {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, -sizeof(T));
    return *this;
  }
  ALWAYS_INLINE CheckedPtr operator++(int /* post_increment */) {
    CheckedPtr result = *this;
    ++(*this);
    return result;
  }
  ALWAYS_INLINE CheckedPtr operator--(int /* post_decrement */) {
    CheckedPtr result = *this;
    --(*this);
    return result;
  }
  ALWAYS_INLINE CheckedPtr& operator+=(ptrdiff_t delta_elems) {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, delta_elems * sizeof(T));
    return *this;
  }
  ALWAYS_INLINE CheckedPtr& operator-=(ptrdiff_t delta_elems) {
    return *this += -delta_elems;
  }

  // Be careful to cover all cases with CheckedPtr being on both sides, left
  // side only and right side only. If any case is missed, a more costly
  // |operator T*()| will get called, instead of |operator==|.
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs,
                                       const CheckedPtr& rhs) {
    return lhs.GetForComparison() == rhs.GetForComparison();
  }
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs,
                                       const CheckedPtr& rhs) {
    return !(lhs == rhs);
  }
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs, T* rhs) {
    return lhs.GetForComparison() == rhs;
  }
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs, T* rhs) {
    return !(lhs == rhs);
  }
  friend ALWAYS_INLINE bool operator==(T* lhs, const CheckedPtr& rhs) {
    return rhs == lhs;  // Reverse order to call the operator above.
  }
  friend ALWAYS_INLINE bool operator!=(T* lhs, const CheckedPtr& rhs) {
    return rhs != lhs;  // Reverse order to call the operator above.
  }

  // Needed for cases like |derived_ptr == base_ptr|. Without these, a more
  // costly |operator T*()| will get called, instead of |operator==|.
  template <typename U>
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs,
                                       const CheckedPtr<U, Impl>& rhs) {
    // Add |const volatile| when casting, in case |U| has any. Even if |T|
    // doesn't, comparison between |T*| and |const volatile T*| is fine.
    return lhs.GetForComparison() ==
           static_cast<std::add_cv_t<T>*>(rhs.GetForComparison());
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs,
                                       const CheckedPtr<U, Impl>& rhs) {
    return !(lhs == rhs);
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs, U* rhs) {
    // Add |const volatile| when casting, in case |U| has any. Even if |T|
    // doesn't, comparison between |T*| and |const volatile T*| is fine.
    return lhs.GetForComparison() == static_cast<std::add_cv_t<T>*>(rhs);
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs, U* rhs) {
    return !(lhs == rhs);
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator==(U* lhs, const CheckedPtr& rhs) {
    return rhs == lhs;  // Reverse order to call the operator above.
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(U* lhs, const CheckedPtr& rhs) {
    return rhs != lhs;  // Reverse order to call the operator above.
  }

  // Needed for comparisons against nullptr. Without these, a slightly more
  // costly version would be called that extracts the wrapped pointer, as
  // opposed to a plain comparison against 0.
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs, std::nullptr_t) {
    return !lhs;
  }
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs, std::nullptr_t) {
    return !!lhs;  // Use !! otherwise the costly implicit cast will be used.
  }
  friend ALWAYS_INLINE bool operator==(std::nullptr_t, const CheckedPtr& rhs) {
    return !rhs;
  }
  friend ALWAYS_INLINE bool operator!=(std::nullptr_t, const CheckedPtr& rhs) {
    return !!rhs;  // Use !! otherwise the costly implicit cast will be used.
  }

  friend ALWAYS_INLINE void swap(CheckedPtr& lhs, CheckedPtr& rhs) noexcept {
    Impl::IncrementSwapCountForTest();
    std::swap(lhs.wrapped_ptr_, rhs.wrapped_ptr_);
  }
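
  // The friend |swap| above is found via argument-dependent lookup, so the
  // idiomatic call site is simply (a sketch):
  //   swap(checked_ptr_a, checked_ptr_b);
  // which swaps the wrapped representations without unwrapping either side.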

 private:
  // This getter is meant for situations where the pointer is meant to be
  // dereferenced. It is allowed to crash on nullptr (it may or may not),
  // because it knows that the caller will crash on nullptr.
  ALWAYS_INLINE T* GetForDereference() const {
    return static_cast<T*>(Impl::SafelyUnwrapPtrForDereference(wrapped_ptr_));
  }
  // This getter is meant for situations where the raw pointer is meant to be
  // extracted outside of this class, but not necessarily with an intention to
  // dereference. It mustn't crash on nullptr.
  ALWAYS_INLINE T* GetForExtraction() const {
    return static_cast<T*>(Impl::SafelyUnwrapPtrForExtraction(wrapped_ptr_));
  }
  // This getter is meant *only* for situations where the pointer is meant to
  // be compared (guaranteeing no dereference or extraction outside of this
  // class). Any verifications can and should be skipped for performance
  // reasons.
  ALWAYS_INLINE T* GetForComparison() const {
    return static_cast<T*>(Impl::UnsafelyUnwrapPtrForComparison(wrapped_ptr_));
  }

  // Store the pointer as |uintptr_t|, because depending on implementation,
  // its unused bits may be re-purposed to store extra information.
  uintptr_t wrapped_ptr_;

  template <typename U, typename V>
  friend class CheckedPtr;
};

}  // namespace base

using base::CheckedPtr;

#endif  // BASE_MEMORY_CHECKED_PTR_H_