  1. /* CpuArch.h -- CPU specific code
  2. 2018-02-18 : Igor Pavlov : Public domain */
  3. #ifndef __CPU_ARCH_H
  4. #define __CPU_ARCH_H
  5. #include "7zTypes.h"
  6. EXTERN_C_BEGIN
  7. /*
  8. MY_CPU_LE means that CPU is LITTLE ENDIAN.
  9. MY_CPU_BE means that CPU is BIG ENDIAN.
  10. If MY_CPU_LE and MY_CPU_BE are not defined, we don't know about ENDIANNESS of platform.
  11. MY_CPU_LE_UNALIGN means that CPU is LITTLE ENDIAN and CPU supports unaligned memory accesses.
  12. */
  13. #if defined(_M_X64) \
  14. || defined(_M_AMD64) \
  15. || defined(__x86_64__) \
  16. || defined(__AMD64__) \
  17. || defined(__amd64__)
  18. #define MY_CPU_AMD64
  19. #ifdef __ILP32__
  20. #define MY_CPU_NAME "x32"
  21. #else
  22. #define MY_CPU_NAME "x64"
  23. #endif
  24. #define MY_CPU_64BIT
  25. #endif
  26. #if defined(_M_IX86) \
  27. || defined(__i386__)
  28. #define MY_CPU_X86
  29. #define MY_CPU_NAME "x86"
  30. #define MY_CPU_32BIT
  31. #endif
  32. #if defined(_M_ARM64) \
  33. || defined(__AARCH64EL__) \
  34. || defined(__AARCH64EB__) \
  35. || defined(__aarch64__)
  36. #define MY_CPU_ARM64
  37. #define MY_CPU_NAME "arm64"
  38. #define MY_CPU_64BIT
  39. #endif
  40. #if defined(_M_ARM) \
  41. || defined(_M_ARM_NT) \
  42. || defined(_M_ARMT) \
  43. || defined(__arm__) \
  44. || defined(__thumb__) \
  45. || defined(__ARMEL__) \
  46. || defined(__ARMEB__) \
  47. || defined(__THUMBEL__) \
  48. || defined(__THUMBEB__)
  49. #define MY_CPU_ARM
  50. #define MY_CPU_NAME "arm"
  51. #define MY_CPU_32BIT
  52. #endif
  53. #if defined(_M_IA64) \
  54. || defined(__ia64__)
  55. #define MY_CPU_IA64
  56. #define MY_CPU_NAME "ia64"
  57. #define MY_CPU_64BIT
  58. #endif
  59. #if defined(__mips64) \
  60. || defined(__mips64__) \
  61. || (defined(__mips) && (__mips == 64 || __mips == 4 || __mips == 3))
  62. #define MY_CPU_NAME "mips64"
  63. #define MY_CPU_64BIT
  64. #elif defined(__mips__)
  65. #define MY_CPU_NAME "mips"
  66. /* #define MY_CPU_32BIT */
  67. #endif
  68. #if defined(__ppc64__) \
  69. || defined(__powerpc64__)
  70. #ifdef __ILP32__
  71. #define MY_CPU_NAME "ppc64-32"
  72. #else
  73. #define MY_CPU_NAME "ppc64"
  74. #endif
  75. #define MY_CPU_64BIT
  76. #elif defined(__ppc__) \
  77. || defined(__powerpc__)
  78. #define MY_CPU_NAME "ppc"
  79. #define MY_CPU_32BIT
  80. #endif
  81. #if defined(__sparc64__)
  82. #define MY_CPU_NAME "sparc64"
  83. #define MY_CPU_64BIT
  84. #elif defined(__sparc__)
  85. #define MY_CPU_NAME "sparc"
  86. /* #define MY_CPU_32BIT */
  87. #endif
  88. #if defined(MY_CPU_X86) || defined(MY_CPU_AMD64)
  89. #define MY_CPU_X86_OR_AMD64
  90. #endif
  91. #ifdef _WIN32
  92. #ifdef MY_CPU_ARM
  93. #define MY_CPU_ARM_LE
  94. #endif
  95. #ifdef MY_CPU_ARM64
  96. #define MY_CPU_ARM64_LE
  97. #endif
  98. #ifdef _M_IA64
  99. #define MY_CPU_IA64_LE
  100. #endif
  101. #endif
  102. #if defined(MY_CPU_X86_OR_AMD64) \
  103. || defined(MY_CPU_ARM_LE) \
  104. || defined(MY_CPU_ARM64_LE) \
  105. || defined(MY_CPU_IA64_LE) \
  106. || defined(__LITTLE_ENDIAN__) \
  107. || defined(__ARMEL__) \
  108. || defined(__THUMBEL__) \
  109. || defined(__AARCH64EL__) \
  110. || defined(__MIPSEL__) \
  111. || defined(__MIPSEL) \
  112. || defined(_MIPSEL) \
  113. || defined(__BFIN__) \
  114. || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__))
  115. #define MY_CPU_LE
  116. #endif
  117. #if defined(__BIG_ENDIAN__) \
  118. || defined(__ARMEB__) \
  119. || defined(__THUMBEB__) \
  120. || defined(__AARCH64EB__) \
  121. || defined(__MIPSEB__) \
  122. || defined(__MIPSEB) \
  123. || defined(_MIPSEB) \
  124. || defined(__m68k__) \
  125. || defined(__s390__) \
  126. || defined(__s390x__) \
  127. || defined(__zarch__) \
  128. || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))
  129. #define MY_CPU_BE
  130. #endif
  131. #if defined(MY_CPU_LE) && defined(MY_CPU_BE)
  132. #error Stop_Compiling_Bad_Endian
  133. #endif
  134. #if defined(MY_CPU_32BIT) && defined(MY_CPU_64BIT)
  135. #error Stop_Compiling_Bad_32_64_BIT
  136. #endif
  137. #ifndef MY_CPU_NAME
  138. #ifdef MY_CPU_LE
  139. #define MY_CPU_NAME "LE"
  140. #elif defined(MY_CPU_BE)
  141. #define MY_CPU_NAME "BE"
  142. #else
  143. /*
  144. #define MY_CPU_NAME ""
  145. */
  146. #endif
  147. #endif
  148. #ifdef MY_CPU_LE
  149. #if defined(MY_CPU_X86_OR_AMD64) \
  150. || defined(MY_CPU_ARM64) \
  151. || defined(__ARM_FEATURE_UNALIGNED)
  152. #define MY_CPU_LE_UNALIGN
  153. #endif
  154. #endif
  155. #ifdef MY_CPU_LE_UNALIGN
  156. #define GetUi16(p) (*(const UInt16 *)(const void *)(p))
  157. #define GetUi32(p) (*(const UInt32 *)(const void *)(p))
  158. #define GetUi64(p) (*(const UInt64 *)(const void *)(p))
  159. #define SetUi16(p, v) { *(UInt16 *)(p) = (v); }
  160. #define SetUi32(p, v) { *(UInt32 *)(p) = (v); }
  161. #define SetUi64(p, v) { *(UInt64 *)(p) = (v); }
  162. #else
  163. #define GetUi16(p) ( (UInt16) ( \
  164. ((const Byte *)(p))[0] | \
  165. ((UInt16)((const Byte *)(p))[1] << 8) ))
  166. #define GetUi32(p) ( \
  167. ((const Byte *)(p))[0] | \
  168. ((UInt32)((const Byte *)(p))[1] << 8) | \
  169. ((UInt32)((const Byte *)(p))[2] << 16) | \
  170. ((UInt32)((const Byte *)(p))[3] << 24))
  171. #define GetUi64(p) (GetUi32(p) | ((UInt64)GetUi32(((const Byte *)(p)) + 4) << 32))
  172. #define SetUi16(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
  173. _ppp_[0] = (Byte)_vvv_; \
  174. _ppp_[1] = (Byte)(_vvv_ >> 8); }
  175. #define SetUi32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
  176. _ppp_[0] = (Byte)_vvv_; \
  177. _ppp_[1] = (Byte)(_vvv_ >> 8); \
  178. _ppp_[2] = (Byte)(_vvv_ >> 16); \
  179. _ppp_[3] = (Byte)(_vvv_ >> 24); }
  180. #define SetUi64(p, v) { Byte *_ppp2_ = (Byte *)(p); UInt64 _vvv2_ = (v); \
  181. SetUi32(_ppp2_ , (UInt32)_vvv2_); \
  182. SetUi32(_ppp2_ + 4, (UInt32)(_vvv2_ >> 32)); }
  183. #endif
  184. #ifdef __has_builtin
  185. #define MY__has_builtin(x) __has_builtin(x)
  186. #else
  187. #define MY__has_builtin(x) 0
  188. #endif
  189. #if defined(MY_CPU_LE_UNALIGN) && /* defined(_WIN64) && */ (_MSC_VER >= 1300)
  190. /* Note: we use bswap instruction, that is unsupported in 386 cpu */
  191. #include <stdlib.h>
  192. #pragma intrinsic(_byteswap_ushort)
  193. #pragma intrinsic(_byteswap_ulong)
  194. #pragma intrinsic(_byteswap_uint64)
  195. /* #define GetBe16(p) _byteswap_ushort(*(const UInt16 *)(const Byte *)(p)) */
  196. #define GetBe32(p) _byteswap_ulong(*(const UInt32 *)(const Byte *)(p))
  197. #define GetBe64(p) _byteswap_uint64(*(const UInt64 *)(const Byte *)(p))
  198. #define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = _byteswap_ulong(v)
  199. #elif defined(MY_CPU_LE_UNALIGN) && ( \
  200. (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))) \
  201. || (defined(__clang__) && MY__has_builtin(__builtin_bswap16)) )
  202. /* #define GetBe16(p) __builtin_bswap16(*(const UInt16 *)(const Byte *)(p)) */
  203. #define GetBe32(p) __builtin_bswap32(*(const UInt32 *)(const Byte *)(p))
  204. #define GetBe64(p) __builtin_bswap64(*(const UInt64 *)(const Byte *)(p))
  205. #define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = __builtin_bswap32(v)
  206. #else
  207. #define GetBe32(p) ( \
  208. ((UInt32)((const Byte *)(p))[0] << 24) | \
  209. ((UInt32)((const Byte *)(p))[1] << 16) | \
  210. ((UInt32)((const Byte *)(p))[2] << 8) | \
  211. ((const Byte *)(p))[3] )
  212. #define GetBe64(p) (((UInt64)GetBe32(p) << 32) | GetBe32(((const Byte *)(p)) + 4))
  213. #define SetBe32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
  214. _ppp_[0] = (Byte)(_vvv_ >> 24); \
  215. _ppp_[1] = (Byte)(_vvv_ >> 16); \
  216. _ppp_[2] = (Byte)(_vvv_ >> 8); \
  217. _ppp_[3] = (Byte)_vvv_; }
  218. #endif
  219. #ifndef GetBe16
  220. #define GetBe16(p) ( (UInt16) ( \
  221. ((UInt16)((const Byte *)(p))[0] << 8) | \
  222. ((const Byte *)(p))[1] ))
  223. #endif
  224. #ifdef MY_CPU_X86_OR_AMD64
  225. typedef struct
  226. {
  227. UInt32 maxFunc;
  228. UInt32 vendor[3];
  229. UInt32 ver;
  230. UInt32 b;
  231. UInt32 c;
  232. UInt32 d;
  233. } Cx86cpuid;
  234. enum
  235. {
  236. CPU_FIRM_INTEL,
  237. CPU_FIRM_AMD,
  238. CPU_FIRM_VIA
  239. };
  240. void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d);
  241. BoolInt x86cpuid_CheckAndRead(Cx86cpuid *p);
  242. int x86cpuid_GetFirm(const Cx86cpuid *p);
  243. #define x86cpuid_GetFamily(ver) (((ver >> 16) & 0xFF0) | ((ver >> 8) & 0xF))
  244. #define x86cpuid_GetModel(ver) (((ver >> 12) & 0xF0) | ((ver >> 4) & 0xF))
  245. #define x86cpuid_GetStepping(ver) (ver & 0xF)
  246. BoolInt CPU_Is_InOrder();
  247. BoolInt CPU_Is_Aes_Supported();
  248. BoolInt CPU_IsSupported_PageGB();
  249. #endif
  250. EXTERN_C_END
  251. #endif