// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_MEMORY_CHECKED_PTR_H_
#define BASE_MEMORY_CHECKED_PTR_H_

#include <stddef.h>
#include <stdint.h>

#include <type_traits>
#include <utility>

#include "base/allocator/partition_allocator/checked_ptr_support.h"
#include "base/allocator/partition_allocator/partition_address_space.h"
#include "base/allocator/partition_allocator/partition_alloc_forward.h"
#include "base/allocator/partition_allocator/partition_ref_count.h"
#include "base/allocator/partition_allocator/partition_tag.h"
#include "base/check_op.h"
#include "base/compiler_specific.h"
#include "base/partition_alloc_buildflags.h"
#include "build/build_config.h"
#include "build/buildflag.h"

#define ENABLE_CHECKED_PTR2_OR_MTE_IMPL 0
#if ENABLE_CHECKED_PTR2_OR_MTE_IMPL
static_assert(ENABLE_TAG_FOR_CHECKED_PTR2 || ENABLE_TAG_FOR_MTE_CHECKED_PTR ||
                  ENABLE_TAG_FOR_SINGLE_TAG_CHECKED_PTR,
              "CheckedPtr2OrMTEImpl can only be used if tags are enabled");
#endif

#define ENABLE_BACKUP_REF_PTR_IMPL 0
#if ENABLE_BACKUP_REF_PTR_IMPL
static_assert(ENABLE_REF_COUNT_FOR_BACKUP_REF_PTR,
              "BackupRefPtrImpl can only be used if PartitionRefCount is "
              "enabled");
#endif

#define CHECKED_PTR2_USE_NO_OP_WRAPPER 0
#define CHECKED_PTR2_USE_TRIVIAL_UNWRAPPER 0

// Set it to 1 to avoid branches when checking if per-pointer protection is
// enabled.
#define CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED 0
// Set it to 1 to avoid branches when dereferencing the pointer.
// Must be 1 if the above is 1.
#define CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING 0

namespace base {

// NOTE: All methods should be ALWAYS_INLINE. CheckedPtr is meant to be a
// lightweight replacement of a raw pointer, hence performance is critical.

namespace internal {
// These classes/structures are part of the CheckedPtr implementation.
// DO NOT USE THESE CLASSES DIRECTLY YOURSELF.

struct CheckedPtrNoOpImpl {
  // Wraps a pointer, and returns its uintptr_t representation.
  // Use |const volatile| to prevent compiler error. These will be dropped
  // anyway when casting to uintptr_t and brought back upon pointer extraction.
  static ALWAYS_INLINE uintptr_t WrapRawPtr(const volatile void* cv_ptr) {
    return reinterpret_cast<uintptr_t>(cv_ptr);
  }

  // Notifies the allocator when a wrapped pointer is being removed or
  // replaced.
  static ALWAYS_INLINE void ReleaseWrappedPtr(uintptr_t) {}

  // Returns equivalent of |WrapRawPtr(nullptr)|. Separated out to make it a
  // constexpr.
  static constexpr ALWAYS_INLINE uintptr_t GetWrappedNullPtr() {
    // This relies on nullptr and 0 being equal in the eyes of
    // reinterpret_cast, which apparently isn't true in all environments.
    return 0;
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function is allowed to crash on nullptr.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForDereference(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function must handle nullptr gracefully.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForExtraction(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Unwraps the pointer's uintptr_t representation, without making an
  // assertion on whether memory was freed or not.
  static ALWAYS_INLINE void* UnsafelyUnwrapPtrForComparison(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  static ALWAYS_INLINE constexpr uintptr_t Upcast(uintptr_t wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    return reinterpret_cast<uintptr_t>(
        static_cast<To*>(reinterpret_cast<From*>(wrapped_ptr)));
  }

  // Advance the wrapped pointer by |delta| bytes.
  static ALWAYS_INLINE uintptr_t Advance(uintptr_t wrapped_ptr, size_t delta) {
    return wrapped_ptr + delta;
  }

  // Returns a copy of a wrapped pointer, without making an assertion
  // on whether memory was freed or not.
  static ALWAYS_INLINE uintptr_t Duplicate(uintptr_t wrapped_ptr) {
    return wrapped_ptr;
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}
};

#if defined(ARCH_CPU_64_BITS) && !defined(OS_NACL)

constexpr int kValidAddressBits = 48;
constexpr uintptr_t kAddressMask = (1ull << kValidAddressBits) - 1;
constexpr int kGenerationBits = sizeof(uintptr_t) * 8 - kValidAddressBits;
constexpr uintptr_t kGenerationMask = ~kAddressMask;
constexpr int kTopBitShift = 63;
constexpr uintptr_t kTopBit = 1ull << kTopBitShift;
static_assert(kTopBit << 1 == 0, "kTopBit should really be the top bit");
static_assert((kTopBit & kGenerationMask) > 0,
              "kTopBit bit must be inside the generation region");

#if BUILDFLAG(USE_PARTITION_ALLOC) && ENABLE_CHECKED_PTR2_OR_MTE_IMPL

// This functionality is outside of CheckedPtr2OrMTEImpl, so that it can be
// overridden by tests.
struct CheckedPtr2OrMTEImplPartitionAllocSupport {
  // Checks if the necessary support is enabled in PartitionAlloc for |ptr|.
  static ALWAYS_INLINE bool EnabledForPtr(void* ptr) {
    // CheckedPtr2 and MTECheckedPtr algorithms work only when memory is
    // allocated by PartitionAlloc, from the normal buckets pool. CheckedPtr2
    // additionally requires that the pointer points to the beginning of the
    // allocated slot.
    //
    // TODO(bartekn): Allow direct-map buckets for MTECheckedPtr, once
    // PartitionAlloc supports it. (Currently not implemented for simplicity,
    // but there are no technological obstacles preventing it; whereas in case
    // of CheckedPtr2, PartitionAllocGetSlotOffset won't work with direct-map.)
    return IsManagedByPartitionAllocNormalBuckets(ptr)
    // Checking the offset is not needed for
    // ENABLE_TAG_FOR_SINGLE_TAG_CHECKED_PTR, but call it anyway for an
    // apples-to-apples comparison with ENABLE_TAG_FOR_CHECKED_PTR2.
#if ENABLE_TAG_FOR_CHECKED_PTR2 || ENABLE_TAG_FOR_SINGLE_TAG_CHECKED_PTR
           && base::internal::PartitionAllocGetSlotOffset(ptr) == 0
#endif
        ;
  }

  // Returns a pointer to the tag that protects the allocation pointed to by
  // |ptr|.
  static ALWAYS_INLINE void* TagPointer(void* ptr) {
    return PartitionTagPointer(ptr);
  }

#if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
  // Returns the offset of the tag from the beginning of the slot. Works only
  // with the CheckedPtr2 algorithm.
  static constexpr size_t TagOffset() {
#if ENABLE_TAG_FOR_CHECKED_PTR2
    return kPartitionTagOffset;
#else
    // Unreachable, but can't use NOTREACHED() due to constexpr. Return
    // something weird so that the caller is very likely to crash.
    return 0x87654321FEDCBA98;
#endif
  }
#endif
};

#endif  // BUILDFLAG(USE_PARTITION_ALLOC) && ENABLE_CHECKED_PTR2_OR_MTE_IMPL

template <typename PartitionAllocSupport>
struct CheckedPtr2OrMTEImpl {
  // This implementation assumes that pointers are 64 bits long and at least 16
  // top bits are unused. The latter is harder to verify statically, but this
  // is true for all currently supported 64-bit architectures (DCHECK when
  // wrapping will verify that).
  static_assert(sizeof(void*) >= 8, "Need 64-bit pointers");

  // Wraps a pointer, and returns its uintptr_t representation.
  static ALWAYS_INLINE uintptr_t WrapRawPtr(const volatile void* cv_ptr) {
    void* ptr = const_cast<void*>(cv_ptr);
    uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
#if !CHECKED_PTR2_USE_NO_OP_WRAPPER
    // Make sure that the address bits that will be used for generation are 0.
    // If they aren't, they'd fool the unwrapper into thinking that the
    // protection is enabled, making it try to read and compare the generation.
    DCHECK_EQ(ExtractGeneration(addr), 0ull);

    // Return a not-wrapped |addr|, if it's either nullptr or if the protection
    // for this pointer is disabled.
    if (!PartitionAllocSupport::EnabledForPtr(ptr)) {
      return addr;
    }

    // Read the generation and place it in the top bits of the address.
    // Even if PartitionAlloc's tag has fewer than kGenerationBits, we'll read
    // what's given and pad the rest with 0s.
    static_assert(sizeof(PartitionTag) * 8 <= kGenerationBits, "");
    uintptr_t generation = *(static_cast<volatile PartitionTag*>(
        PartitionAllocSupport::TagPointer(ptr)));
    generation <<= kValidAddressBits;
    addr |= generation;
#if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    // Always set the top bit to 1, to indicate that the protection is enabled.
    addr |= kTopBit;
#endif  // CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
#endif  // !CHECKED_PTR2_USE_NO_OP_WRAPPER
    return addr;
  }
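
  // Worked example of the wrapping above (a sketch; the address and the
  // PartitionAlloc tag value are made up):
  //   ptr  = 0x000012345678, tag read via TagPointer(ptr) = 0x0442
  //   addr = 0x000012345678
  //        |= 0x0442 << kValidAddressBits     -> 0x0442000012345678
  //        |= kTopBit (branch-avoiding mode)  -> 0x8442000012345678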

  // Notifies the allocator when a wrapped pointer is being removed or
  // replaced. No-op for CheckedPtr2OrMTEImpl.
  static ALWAYS_INLINE void ReleaseWrappedPtr(uintptr_t) {}

  // Returns equivalent of |WrapRawPtr(nullptr)|. Separated out to make it a
  // constexpr.
  static constexpr ALWAYS_INLINE uintptr_t GetWrappedNullPtr() {
    return kWrappedNullPtr;
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function is allowed to crash on nullptr.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForDereference(
      uintptr_t wrapped_ptr) {
#if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    // This variant can only be used with the CheckedPtr2 algorithm, because it
    // relies on the generation to exist at a constant offset before the
    // allocation.
    static_assert(ENABLE_TAG_FOR_CHECKED_PTR2, "");

    // The top bit tells if the protection is enabled. Use it to decide whether
    // to read the word before the allocation, which exists only if the
    // protection is enabled. Otherwise it may crash, in which case read the
    // data from the beginning of the allocation instead and ignore it later.
    // All this magic is to avoid a branch, for performance reasons.
    //
    // A couple of examples, assuming a 64-bit system (continued below):
    //   Ex.1: wrapped_ptr=0x8442000012345678
    //         => enabled=0x8000000000000000
    //         => offset=1
    //   Ex.2: wrapped_ptr=0x0000000012345678
    //         => enabled=0x0000000000000000
    //         => offset=0
    uintptr_t enabled = wrapped_ptr & kTopBit;
    // We can't have protection disabled and generation set at the same time.
    DCHECK(!(enabled == 0 && (ExtractGeneration(wrapped_ptr)) != 0));
    uintptr_t offset = enabled >> kTopBitShift;  // 0 or 1

    // Use |offset| to decide if the generation should be read at the beginning
    // of the allocation or before it.
    // TODO(bartekn): Do something about 1-byte allocations. Reading a 2-byte
    // generation at the allocation could crash. This case is executed
    // specifically for non-PartitionAlloc pointers, so we can't make
    // assumptions about alignment.
    //
    // Cast to volatile to ensure memory is read. E.g. in a tight loop, the
    // compiler could cache the value in a register and thus could miss that
    // another thread freed memory and cleared the generation.
    //
    // Examples (continued):
    //   Ex.1: generation_ptr=0x0000000012345676
    //         a) if pointee wasn't freed, read e.g. generation=0x0442 (could
    //            be also 0x8442, the top bit is overwritten later)
    //         b) if pointee was freed, read e.g. generation=0x1234 (could be
    //            anything)
    //   Ex.2: generation_ptr=0x0000000012345678, read e.g. 0x2345 (doesn't
    //         matter what we read, as long as this read doesn't crash)
    volatile PartitionTag* generation_ptr =
        static_cast<volatile PartitionTag*>(ExtractPtr(wrapped_ptr)) -
        offset * (PartitionAllocSupport::TagOffset() / sizeof(PartitionTag));
    uintptr_t generation = *generation_ptr;

    // Shift the generation into the right place and add back the enabled bit.
    //
    // Examples (continued):
    //   Ex.1:
    //     a) generation=0x8442000000000000
    //     b) generation=0x9234000000000000
    //   Ex.2: generation=0x2345000000000000
    generation <<= kValidAddressBits;
    generation |= enabled;

    // If the protection isn't enabled, clear the top bits. Casting to a signed
    // type makes >> sign extend the last bit.
    //
    // Examples (continued):
    //   Ex.1: mask=0xffff000000000000
    //     a) generation=0x8442000000000000
    //     b) generation=0x9234000000000000
    //   Ex.2: mask=0x0000000000000000 => generation=0x0000000000000000
    uintptr_t mask = static_cast<intptr_t>(enabled) >> (kGenerationBits - 1);
    generation &= mask;

    // Use hardware to detect generation mismatch. The CPU will crash if the
    // top bits aren't all 0 (technically it won't if all bits are 1, but
    // that's a kernel mode address, which isn't allowed either... also, the
    // top bit will be always zeroed out).
    //
    // Examples (continued):
    //   Ex.1:
    //     a) returning 0x0000000012345678
    //     b) returning 0x1676000012345678 (this will generate a desired crash)
    //   Ex.2: returning 0x0000000012345678
    static_assert(CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING, "");
    return reinterpret_cast<void*>(generation ^ wrapped_ptr);
#else  // CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    uintptr_t ptr_generation = wrapped_ptr >> kValidAddressBits;
    if (ptr_generation > 0) {
      // Read the generation provided by PartitionAlloc.
      //
      // Cast to volatile to ensure memory is read. E.g. in a tight loop, the
      // compiler could cache the value in a register and thus could miss that
      // another thread freed memory and cleared the generation.
      uintptr_t read_generation = *static_cast<volatile PartitionTag*>(
          PartitionAllocSupport::TagPointer(ExtractPtr(wrapped_ptr)));
#if CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING
      // Use hardware to detect generation mismatch. The CPU will crash if the
      // top bits aren't all 0 (technically it won't if all bits are 1, but
      // that's a kernel mode address, which isn't allowed either).
      read_generation <<= kValidAddressBits;
      return reinterpret_cast<void*>(read_generation ^ wrapped_ptr);
#else
      if (UNLIKELY(ptr_generation != read_generation))
        IMMEDIATE_CRASH();
      return reinterpret_cast<void*>(wrapped_ptr & kAddressMask);
#endif  // CHECKED_PTR2_AVOID_BRANCH_WHEN_DEREFERENCING
    }
    return reinterpret_cast<void*>(wrapped_ptr);
#endif  // CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
  }
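
  // A sketch of the non-branch-avoiding path above (values made up):
  //   wrapped_ptr = 0x0442000012345678  => ptr_generation = 0x0442
  //   - live pointee:  the tag in the slot still reads 0x0442
  //                    => read_generation == ptr_generation, unwrap succeeds
  //   - freed pointee: the tag was changed on free, reads e.g. 0x0443
  //                    => mismatch => crash (or a crashing address in the
  //                       branch-avoiding dereference variant)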

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function must handle nullptr gracefully.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForExtraction(
      uintptr_t wrapped_ptr) {
#if CHECKED_PTR2_AVOID_BRANCH_WHEN_CHECKING_ENABLED
    // In this implementation, SafelyUnwrapPtrForDereference doesn't tolerate
    // nullptr, because it reads unconditionally to avoid branches. Handle the
    // nullptr case here.
    if (wrapped_ptr == kWrappedNullPtr)
      return nullptr;
    return SafelyUnwrapPtrForDereference(wrapped_ptr);
#else
    // In this implementation, SafelyUnwrapPtrForDereference handles the
    // nullptr case well.
    return SafelyUnwrapPtrForDereference(wrapped_ptr);
#endif
  }

  // Unwraps the pointer's uintptr_t representation, without making an
  // assertion on whether memory was freed or not.
  static ALWAYS_INLINE void* UnsafelyUnwrapPtrForComparison(
      uintptr_t wrapped_ptr) {
    return ExtractPtr(wrapped_ptr);
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  static ALWAYS_INLINE uintptr_t Upcast(uintptr_t wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
#if ENABLE_TAG_FOR_CHECKED_PTR2 || ENABLE_TAG_FOR_SINGLE_TAG_CHECKED_PTR
    if (IsPtrUnaffectedByUpcast<To, From>())
      return wrapped_ptr;

    // CheckedPtr2 doesn't support a pointer pointing in the middle of an
    // allocated object, so disable the generation tag.
    //
    // Clearing the tag is not needed for
    // ENABLE_TAG_FOR_SINGLE_TAG_CHECKED_PTR, but do it anyway for an
    // apples-to-apples comparison with ENABLE_TAG_FOR_CHECKED_PTR2.
    uintptr_t base_addr = reinterpret_cast<uintptr_t>(
        static_cast<To*>(reinterpret_cast<From*>(ExtractPtr(wrapped_ptr))));
    return base_addr;
#elif ENABLE_TAG_FOR_MTE_CHECKED_PTR
    // The top-bit generation tag must not affect the result of upcast.
    return reinterpret_cast<uintptr_t>(
        static_cast<To*>(reinterpret_cast<From*>(wrapped_ptr)));
#else
    static_assert(std::is_void<To>::value,  // Always false.
                  "Unknown tagging mode");
    return 0;
#endif
  }

  // Advance the wrapped pointer by |delta| bytes.
  static ALWAYS_INLINE uintptr_t Advance(uintptr_t wrapped_ptr, size_t delta) {
    // Mask out the generation to disable the protection. It's not supported
    // for pointers inside an allocation.
    return ExtractAddress(wrapped_ptr) + delta;
  }

  // Returns a copy of a wrapped pointer, without making an assertion
  // on whether memory was freed or not.
  static ALWAYS_INLINE uintptr_t Duplicate(uintptr_t wrapped_ptr) {
    return wrapped_ptr;
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}

 private:
  static ALWAYS_INLINE uintptr_t ExtractAddress(uintptr_t wrapped_ptr) {
    return wrapped_ptr & kAddressMask;
  }

  static ALWAYS_INLINE void* ExtractPtr(uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(ExtractAddress(wrapped_ptr));
  }

  static ALWAYS_INLINE uintptr_t ExtractGeneration(uintptr_t wrapped_ptr) {
    return wrapped_ptr & kGenerationMask;
  }

  // Checks whether upcasting From* to To* adjusts the pointer value, by
  // simulating the cast on an arbitrary dummy address.
  template <typename To, typename From>
  static constexpr ALWAYS_INLINE bool IsPtrUnaffectedByUpcast() {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    uintptr_t d = 0x10000;
    From* dp = reinterpret_cast<From*>(d);
    To* bp = dp;
    uintptr_t b = reinterpret_cast<uintptr_t>(bp);
    return b == d;
  }
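
  // Illustration of the check above with a hypothetical hierarchy (not part
  // of this file): given
  //   struct A { int a; };
  //   struct B { int b; };
  //   struct C : A, B {};
  // upcasting C* to A* keeps the address, so IsPtrUnaffectedByUpcast() is
  // true, while upcasting C* to B* shifts the address (by sizeof(A) in
  // typical ABIs), so the wrapped pointer must be re-derived and the
  // generation tag dropped.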

  // This relies on nullptr and 0 being equal in the eyes of reinterpret_cast,
  // which apparently isn't true in some rare environments.
  static constexpr uintptr_t kWrappedNullPtr = 0;
};

#if ENABLE_BACKUP_REF_PTR_IMPL

// Note that `BackupRefPtrImpl` itself is not thread-safe. If multiple threads
// modify the same smart pointer object without synchronization, a data race
// will occur.
struct BackupRefPtrImpl {
  // Wraps a pointer, and returns its uintptr_t representation.
  // Use |const volatile| to prevent compiler error. These will be dropped
  // anyway when casting to uintptr_t and brought back upon pointer extraction.
  static ALWAYS_INLINE uintptr_t WrapRawPtr(const volatile void* cv_ptr) {
    void* ptr = const_cast<void*>(cv_ptr);
    uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
    if (LIKELY(IsManagedByPartitionAllocNormalBuckets(ptr)))
      PartitionRefCountPointer(ptr)->AddRef();
    return addr;
  }

  // Notifies the allocator when a wrapped pointer is being removed or
  // replaced.
  static ALWAYS_INLINE void ReleaseWrappedPtr(uintptr_t wrapped_ptr) {
    void* ptr = reinterpret_cast<void*>(wrapped_ptr);
    // This check already covers the nullptr case.
    if (LIKELY(IsManagedByPartitionAllocNormalBuckets(ptr)))
      PartitionRefCountPointer(ptr)->Release();
  }

  // Returns equivalent of |WrapRawPtr(nullptr)|. Separated out to make it a
  // constexpr.
  static constexpr ALWAYS_INLINE uintptr_t GetWrappedNullPtr() {
    // This relies on nullptr and 0 being equal in the eyes of
    // reinterpret_cast, which apparently isn't true in all environments.
    return 0;
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function is allowed to crash on nullptr.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForDereference(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that
  // memory hasn't been freed. The function must handle nullptr gracefully.
  static ALWAYS_INLINE void* SafelyUnwrapPtrForExtraction(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Unwraps the pointer's uintptr_t representation, without making an
  // assertion on whether memory was freed or not.
  static ALWAYS_INLINE void* UnsafelyUnwrapPtrForComparison(
      uintptr_t wrapped_ptr) {
    return reinterpret_cast<void*>(wrapped_ptr);
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  static ALWAYS_INLINE constexpr uintptr_t Upcast(uintptr_t wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    return reinterpret_cast<uintptr_t>(
        static_cast<To*>(reinterpret_cast<From*>(wrapped_ptr)));
  }

  // Advance the wrapped pointer by |delta| bytes.
  static ALWAYS_INLINE uintptr_t Advance(uintptr_t wrapped_ptr, size_t delta) {
    return wrapped_ptr + delta;
  }

  // Returns a copy of a wrapped pointer, without making an assertion on
  // whether memory was freed or not. This method increments the reference
  // count of the allocation slot.
  static ALWAYS_INLINE uintptr_t Duplicate(uintptr_t wrapped_ptr) {
    return WrapRawPtr(reinterpret_cast<void*>(wrapped_ptr));
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}
};
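
// Sketch of the BackupRefPtr ref-counting lifecycle (illustrative only;
// |GetPartitionAllocedObject()| is a hypothetical helper returning a pointer
// into a PartitionAlloc normal bucket):
//   CheckedPtr<T> p = GetPartitionAllocedObject();  // WrapRawPtr -> AddRef()
//   CheckedPtr<T> q = p;  // Duplicate -> AddRef() again
//   p = nullptr;          // ReleaseWrappedPtr -> Release()
//   // While references remain, the allocator can delay reuse of the slot,
//   // so a stray access through |q| after free doesn't land on an unrelated
//   // live object.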

#endif  // ENABLE_BACKUP_REF_PTR_IMPL

#endif  // defined(ARCH_CPU_64_BITS) && !defined(OS_NACL)

}  // namespace internal

// DO NOT USE! EXPERIMENTAL ONLY! This is helpful for local testing!
//
// CheckedPtr is meant to be a pointer wrapper that will crash on
// Use-After-Free (UaF) to prevent security issues. This is very much in the
// experimental phase. More context in:
// https://docs.google.com/document/d/1pnnOAIz_DMWDI4oIOFoMAqLnf_MZ2GsrJNb_dbQ3ZBg
//
// For now, CheckedPtr is a no-op wrapper to aid local testing.
//
// Goals for this API:
// 1. Minimize amount of caller-side changes as much as physically possible.
// 2. Keep this class as small as possible, while still satisfying goal #1
//    (i.e. we aren't striving to maximize compatibility with raw pointers,
//    merely adding support for cases encountered so far).
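//
// Example usage (illustrative; |MyObject| stands in for any type):
//   CheckedPtr<MyObject> ptr = new MyObject;
//   ptr->Method();        // Dereference, checked by the active Impl.
//   MyObject* raw = ptr;  // Implicit extraction for raw-pointer APIs.
//   ptr = nullptr;        // Releases the wrapped pointer (Impl-dependent).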
template <typename T,
#if defined(ARCH_CPU_64_BITS) && !defined(OS_NACL) && \
    BUILDFLAG(USE_PARTITION_ALLOC)
#if ENABLE_CHECKED_PTR2_OR_MTE_IMPL
          typename Impl = internal::CheckedPtr2OrMTEImpl<
              internal::CheckedPtr2OrMTEImplPartitionAllocSupport>>
#elif ENABLE_BACKUP_REF_PTR_IMPL
          typename Impl = internal::BackupRefPtrImpl>
#else
          typename Impl = internal::CheckedPtrNoOpImpl>
#endif
#else   // defined(ARCH_CPU_64_BITS) && !defined(OS_NACL) &&
        // BUILDFLAG(USE_PARTITION_ALLOC)
          typename Impl = internal::CheckedPtrNoOpImpl>
#endif
class CheckedPtr {
 public:
#if ENABLE_BACKUP_REF_PTR_IMPL
  // BackupRefPtr requires a non-trivial default constructor, destructor, etc.
  constexpr ALWAYS_INLINE CheckedPtr() noexcept
      : wrapped_ptr_(Impl::GetWrappedNullPtr()) {}

  CheckedPtr(const CheckedPtr& p) noexcept
      : wrapped_ptr_(Impl::Duplicate(p.wrapped_ptr_)) {}

  CheckedPtr(CheckedPtr&& p) noexcept {
    wrapped_ptr_ = p.wrapped_ptr_;
    p.wrapped_ptr_ = Impl::GetWrappedNullPtr();
  }

  CheckedPtr& operator=(const CheckedPtr& p) {
    // Duplicate before releasing, in case the pointer is assigned to itself.
    uintptr_t new_ptr = Impl::Duplicate(p.wrapped_ptr_);
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = new_ptr;
    return *this;
  }

  CheckedPtr& operator=(CheckedPtr&& p) {
    if (LIKELY(this != &p)) {
      Impl::ReleaseWrappedPtr(wrapped_ptr_);
      wrapped_ptr_ = p.wrapped_ptr_;
      p.wrapped_ptr_ = Impl::GetWrappedNullPtr();
    }
    return *this;
  }

  ALWAYS_INLINE ~CheckedPtr() noexcept {
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    // Work around external issues where CheckedPtr is used after destruction.
    wrapped_ptr_ = Impl::GetWrappedNullPtr();
  }
#else   // ENABLE_BACKUP_REF_PTR_IMPL
  // CheckedPtr can be trivially default constructed (leaving |wrapped_ptr_|
  // uninitialized). This is needed for compatibility with raw pointers.
  //
  // TODO(lukasza): Always initialize |wrapped_ptr_|. Fix resulting build
  // errors. Analyze performance impact.
  constexpr CheckedPtr() noexcept = default;

  // In addition to the nullptr_t ctor below, CheckedPtr needs to have these
  // as |=default| or |constexpr| to avoid hitting -Wglobal-constructors in
  // cases like this:
  //   struct SomeStruct { int int_field; CheckedPtr<int> ptr_field; };
  //   SomeStruct g_global_var = { 123, nullptr };
  CheckedPtr(const CheckedPtr&) noexcept = default;
  CheckedPtr(CheckedPtr&&) noexcept = default;
  CheckedPtr& operator=(const CheckedPtr&) noexcept = default;
  CheckedPtr& operator=(CheckedPtr&&) noexcept = default;
  ~CheckedPtr() = default;
#endif  // ENABLE_BACKUP_REF_PTR_IMPL

  // Deliberately implicit, because CheckedPtr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(runtime/explicit)
  constexpr ALWAYS_INLINE CheckedPtr(std::nullptr_t) noexcept
      : wrapped_ptr_(Impl::GetWrappedNullPtr()) {}

  // Deliberately implicit, because CheckedPtr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(runtime/explicit)
  ALWAYS_INLINE CheckedPtr(T* p) noexcept : wrapped_ptr_(Impl::WrapRawPtr(p)) {}

  // Deliberately implicit in order to support implicit upcast.
  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  // NOLINTNEXTLINE(google-explicit-constructor)
  ALWAYS_INLINE CheckedPtr(const CheckedPtr<U, Impl>& ptr) noexcept
      : wrapped_ptr_(
            Impl::Duplicate(Impl::template Upcast<T, U>(ptr.wrapped_ptr_))) {}

  // Deliberately implicit in order to support implicit upcast.
  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  // NOLINTNEXTLINE(google-explicit-constructor)
  ALWAYS_INLINE CheckedPtr(CheckedPtr<U, Impl>&& ptr) noexcept
      : wrapped_ptr_(Impl::template Upcast<T, U>(ptr.wrapped_ptr_)) {
#if ENABLE_BACKUP_REF_PTR_IMPL
    ptr.wrapped_ptr_ = Impl::GetWrappedNullPtr();
#endif
  }

  ALWAYS_INLINE CheckedPtr& operator=(std::nullptr_t) noexcept {
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = Impl::GetWrappedNullPtr();
    return *this;
  }

  ALWAYS_INLINE CheckedPtr& operator=(T* p) noexcept {
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = Impl::WrapRawPtr(p);
    return *this;
  }

  // Upcast assignment.
  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  ALWAYS_INLINE CheckedPtr& operator=(
      const CheckedPtr<U, Impl>& ptr) noexcept {
    DCHECK(reinterpret_cast<uintptr_t>(this) !=
           reinterpret_cast<uintptr_t>(&ptr));
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ =
        Impl::Duplicate(Impl::template Upcast<T, U>(ptr.wrapped_ptr_));
    return *this;
  }

  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  ALWAYS_INLINE CheckedPtr& operator=(CheckedPtr<U, Impl>&& ptr) noexcept {
    DCHECK(reinterpret_cast<uintptr_t>(this) !=
           reinterpret_cast<uintptr_t>(&ptr));
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = Impl::template Upcast<T, U>(ptr.wrapped_ptr_);
#if ENABLE_BACKUP_REF_PTR_IMPL
    ptr.wrapped_ptr_ = Impl::GetWrappedNullPtr();
#endif
    return *this;
  }

  // Avoid using. The goal of CheckedPtr is to be as close to raw pointer as
  // possible, so use it only if absolutely necessary (e.g. for const_cast).
  ALWAYS_INLINE T* get() const { return GetForExtraction(); }

  explicit ALWAYS_INLINE operator bool() const {
    return wrapped_ptr_ != Impl::GetWrappedNullPtr();
  }

  template <typename U = T,
            typename Unused = std::enable_if_t<
                !std::is_void<typename std::remove_cv<U>::type>::value>>
  ALWAYS_INLINE U& operator*() const {
    return *GetForDereference();
  }
  ALWAYS_INLINE T* operator->() const { return GetForDereference(); }

  // Deliberately implicit, because CheckedPtr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(runtime/explicit)
  ALWAYS_INLINE operator T*() const { return GetForExtraction(); }
  template <typename U>
  explicit ALWAYS_INLINE operator U*() const {
    return static_cast<U*>(GetForExtraction());
  }

  ALWAYS_INLINE CheckedPtr& operator++() {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, sizeof(T));
    return *this;
  }
  ALWAYS_INLINE CheckedPtr& operator--() {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, -sizeof(T));
    return *this;
  }
  ALWAYS_INLINE CheckedPtr operator++(int /* post_increment */) {
    CheckedPtr result = *this;
    ++(*this);
    return result;
  }
  ALWAYS_INLINE CheckedPtr operator--(int /* post_decrement */) {
    CheckedPtr result = *this;
    --(*this);
    return result;
  }
  ALWAYS_INLINE CheckedPtr& operator+=(ptrdiff_t delta_elems) {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, delta_elems * sizeof(T));
    return *this;
  }
  ALWAYS_INLINE CheckedPtr& operator-=(ptrdiff_t delta_elems) {
    return *this += -delta_elems;
  }

  // Be careful to cover all cases with CheckedPtr being on both sides, left
  // side only and right side only. If any case is missed, a more costly
  // |operator T*()| will get called, instead of |operator==|.
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs,
                                       const CheckedPtr& rhs) {
    return lhs.GetForComparison() == rhs.GetForComparison();
  }
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs,
                                       const CheckedPtr& rhs) {
    return !(lhs == rhs);
  }
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs, T* rhs) {
    return lhs.GetForComparison() == rhs;
  }
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs, T* rhs) {
    return !(lhs == rhs);
  }
  friend ALWAYS_INLINE bool operator==(T* lhs, const CheckedPtr& rhs) {
    return rhs == lhs;  // Reverse order to call the operator above.
  }
  friend ALWAYS_INLINE bool operator!=(T* lhs, const CheckedPtr& rhs) {
    return rhs != lhs;  // Reverse order to call the operator above.
  }

  // Needed for cases like |derived_ptr == base_ptr|. Without these, a more
  // costly |operator T*()| will get called, instead of |operator==|.
  template <typename U>
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs,
                                       const CheckedPtr<U, Impl>& rhs) {
    // Add |const volatile| when casting, in case |U| has any. Even if |T|
    // doesn't, comparison between |T*| and |const volatile T*| is fine.
    return lhs.GetForComparison() ==
           static_cast<std::add_cv_t<T>*>(rhs.GetForComparison());
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs,
                                       const CheckedPtr<U, Impl>& rhs) {
    return !(lhs == rhs);
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs, U* rhs) {
    // Add |const volatile| when casting, in case |U| has any. Even if |T|
    // doesn't, comparison between |T*| and |const volatile T*| is fine.
    return lhs.GetForComparison() == static_cast<std::add_cv_t<T>*>(rhs);
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs, U* rhs) {
    return !(lhs == rhs);
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator==(U* lhs, const CheckedPtr& rhs) {
    return rhs == lhs;  // Reverse order to call the operator above.
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(U* lhs, const CheckedPtr& rhs) {
    return rhs != lhs;  // Reverse order to call the operator above.
  }

  // Needed for comparisons against nullptr. Without these, a slightly more
  // costly version would be called that extracts the wrapped pointer, as
  // opposed to a plain comparison against 0.
  friend ALWAYS_INLINE bool operator==(const CheckedPtr& lhs, std::nullptr_t) {
    return !lhs;
  }
  friend ALWAYS_INLINE bool operator!=(const CheckedPtr& lhs, std::nullptr_t) {
    return !!lhs;  // Use !! otherwise the costly implicit cast will be used.
  }
  friend ALWAYS_INLINE bool operator==(std::nullptr_t, const CheckedPtr& rhs) {
    return !rhs;
  }
  friend ALWAYS_INLINE bool operator!=(std::nullptr_t, const CheckedPtr& rhs) {
    return !!rhs;  // Use !! otherwise the costly implicit cast will be used.
  }

  friend ALWAYS_INLINE void swap(CheckedPtr& lhs, CheckedPtr& rhs) noexcept {
    Impl::IncrementSwapCountForTest();
    std::swap(lhs.wrapped_ptr_, rhs.wrapped_ptr_);
  }

 private:
  // This getter is meant for situations where the pointer is meant to be
  // dereferenced. It is allowed to crash on nullptr (it may or may not),
  // because it knows that the caller will crash on nullptr.
  ALWAYS_INLINE T* GetForDereference() const {
#if CHECKED_PTR2_USE_TRIVIAL_UNWRAPPER
    return static_cast<T*>(Impl::UnsafelyUnwrapPtrForComparison(wrapped_ptr_));
#else
    return static_cast<T*>(Impl::SafelyUnwrapPtrForDereference(wrapped_ptr_));
#endif
  }
  // This getter is meant for situations where the raw pointer is meant to be
  // extracted outside of this class, but not necessarily with an intention to
  // dereference. It mustn't crash on nullptr.
  ALWAYS_INLINE T* GetForExtraction() const {
#if CHECKED_PTR2_USE_TRIVIAL_UNWRAPPER
    return static_cast<T*>(Impl::UnsafelyUnwrapPtrForComparison(wrapped_ptr_));
#else
    return static_cast<T*>(Impl::SafelyUnwrapPtrForExtraction(wrapped_ptr_));
#endif
  }
  // This getter is meant *only* for situations where the pointer is meant to
  // be compared (guaranteeing no dereference or extraction outside of this
  // class). Any verifications can and should be skipped for performance
  // reasons.
  ALWAYS_INLINE T* GetForComparison() const {
    return static_cast<T*>(Impl::UnsafelyUnwrapPtrForComparison(wrapped_ptr_));
  }

  // Store the pointer as |uintptr_t|, because depending on the implementation,
  // its unused bits may be re-purposed to store extra information.
  uintptr_t wrapped_ptr_;

  template <typename U, typename V>
  friend class CheckedPtr;
};

}  // namespace base

using base::CheckedPtr;

#endif  // BASE_MEMORY_CHECKED_PTR_H_