import numpy as np
import pytest

import pandas as pd
from pandas import (
    DataFrame,
    Index,
    MultiIndex,
    Series,
)
import pandas._testing as tm
from pandas.core.util.hashing import hash_tuples
from pandas.util import (
    hash_array,
    hash_pandas_object,
)


@pytest.fixture(
    params=[
        Series([1, 2, 3] * 3, dtype="int32"),
        Series([None, 2.5, 3.5] * 3, dtype="float32"),
        Series(["a", "b", "c"] * 3, dtype="category"),
        Series(["d", "e", "f"] * 3),
        Series([True, False, True] * 3),
        Series(pd.date_range("20130101", periods=9)),
        Series(pd.date_range("20130101", periods=9, tz="US/Eastern")),
        Series(pd.timedelta_range("2000", periods=9)),
    ]
)
def series(request):
    return request.param


@pytest.fixture(params=[True, False])
def index(request):
    return request.param


def test_consistency():
    # Check that our hash doesn't change because of a mistake
    # in the actual code; this is the ground truth.
    result = hash_pandas_object(Index(["foo", "bar", "baz"]))
    expected = Series(
        np.array(
            [3600424527151052760, 1374399572096150070, 477881037637427054],
            dtype="uint64",
        ),
        index=["foo", "bar", "baz"],
    )
    tm.assert_series_equal(result, expected)


def test_hash_array(series):
    arr = series.values
    tm.assert_numpy_array_equal(hash_array(arr), hash_array(arr))
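

# A minimal usage sketch: hash_array maps each element of the input ndarray
# to a uint64, so the output dtype is fixed and its shape matches the input.
# The helper name is hypothetical and deliberately not prefixed with "test_"
# so pytest would not collect it.
def example_hash_array_shape():
    arr = np.array([1, 2, 3], dtype="int64")
    out = hash_array(arr)
    assert out.dtype == np.uint64
    assert out.shape == arr.shape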


@pytest.mark.parametrize("dtype", ["U", object])
def test_hash_array_mixed(dtype):
    result1 = hash_array(np.array(["3", "4", "All"]))
    result2 = hash_array(np.array([3, 4, "All"], dtype=dtype))
    tm.assert_numpy_array_equal(result1, result2)


@pytest.mark.parametrize("val", [5, "foo", pd.Timestamp("20130101")])
def test_hash_array_errors(val):
    msg = "must pass a ndarray-like"
    with pytest.raises(TypeError, match=msg):
        hash_array(val)


def test_hash_array_index_exception():
    # GH42003 TypeError instead of AttributeError
    obj = pd.DatetimeIndex(["2018-10-28 01:20:00"], tz="Europe/Berlin")

    msg = "Use hash_pandas_object instead"
    with pytest.raises(TypeError, match=msg):
        hash_array(obj)


def test_hash_tuples():
    tuples = [(1, "one"), (1, "two"), (2, "one")]
    result = hash_tuples(tuples)

    expected = hash_pandas_object(MultiIndex.from_tuples(tuples)).values
    tm.assert_numpy_array_equal(result, expected)

    # We only need to support MultiIndex and list-of-tuples
    msg = "|".join(["object is not iterable", "zip argument #1 must support iteration"])
    with pytest.raises(TypeError, match=msg):
        hash_tuples(tuples[0])


@pytest.mark.parametrize("val", [5, "foo", pd.Timestamp("20130101")])
def test_hash_tuples_err(val):
    msg = "must be convertible to a list-of-tuples"
    with pytest.raises(TypeError, match=msg):
        hash_tuples(val)


def test_multiindex_unique():
    mi = MultiIndex.from_tuples([(118, 472), (236, 118), (51, 204), (102, 51)])
    assert mi.is_unique is True

    result = hash_pandas_object(mi)
    assert result.is_unique is True


def test_multiindex_objects():
    mi = MultiIndex(
        levels=[["b", "d", "a"], [1, 2, 3]],
        codes=[[0, 1, 0, 2], [2, 0, 0, 1]],
        names=["col1", "col2"],
    )
    recons = mi._sort_levels_monotonic()

    # These are equal.
    assert mi.equals(recons)
    assert Index(mi.values).equals(Index(recons.values))


@pytest.mark.parametrize(
    "obj",
    [
        Series([1, 2, 3]),
        Series([1.0, 1.5, 3.2]),
        Series([1.0, 1.5, np.nan]),
        Series([1.0, 1.5, 3.2], index=[1.5, 1.1, 3.3]),
        Series(["a", "b", "c"]),
        Series(["a", np.nan, "c"]),
        Series(["a", None, "c"]),
        Series([True, False, True]),
        Series(dtype=object),
        DataFrame({"x": ["a", "b", "c"], "y": [1, 2, 3]}),
        DataFrame(),
        tm.makeMissingDataframe(),
        tm.makeMixedDataFrame(),
        tm.makeTimeDataFrame(),
        tm.makeTimeSeries(),
        Series(tm.makePeriodIndex()),
        Series(pd.date_range("20130101", periods=3, tz="US/Eastern")),
    ],
)
def test_hash_pandas_object(obj, index):
    a = hash_pandas_object(obj, index=index)
    b = hash_pandas_object(obj, index=index)
    tm.assert_series_equal(a, b)


@pytest.mark.parametrize(
    "obj",
    [
        Series([1, 2, 3]),
        Series([1.0, 1.5, 3.2]),
        Series([1.0, 1.5, np.nan]),
        Series([1.0, 1.5, 3.2], index=[1.5, 1.1, 3.3]),
        Series(["a", "b", "c"]),
        Series(["a", np.nan, "c"]),
        Series(["a", None, "c"]),
        Series([True, False, True]),
        DataFrame({"x": ["a", "b", "c"], "y": [1, 2, 3]}),
        tm.makeMissingDataframe(),
        tm.makeMixedDataFrame(),
        tm.makeTimeDataFrame(),
        tm.makeTimeSeries(),
        Series(tm.makePeriodIndex()),
        Series(pd.date_range("20130101", periods=3, tz="US/Eastern")),
    ],
)
def test_hash_pandas_object_diff_index_non_empty(obj):
    a = hash_pandas_object(obj, index=True)
    b = hash_pandas_object(obj, index=False)
    assert not (a == b).all()


@pytest.mark.parametrize(
    "obj",
    [
        Index([1, 2, 3]),
        Index([True, False, True]),
        tm.makeTimedeltaIndex(),
        tm.makePeriodIndex(),
        MultiIndex.from_product(
            [range(5), ["foo", "bar", "baz"], pd.date_range("20130101", periods=2)]
        ),
        MultiIndex.from_product([pd.CategoricalIndex(list("aabc")), range(3)]),
    ],
)
def test_hash_pandas_index(obj, index):
    a = hash_pandas_object(obj, index=index)
    b = hash_pandas_object(obj, index=index)
    tm.assert_series_equal(a, b)


def test_hash_pandas_series(series, index):
    a = hash_pandas_object(series, index=index)
    b = hash_pandas_object(series, index=index)
    tm.assert_series_equal(a, b)


def test_hash_pandas_series_diff_index(series):
    a = hash_pandas_object(series, index=True)
    b = hash_pandas_object(series, index=False)
    assert not (a == b).all()


@pytest.mark.parametrize(
    "obj", [Series([], dtype="float64"), Series([], dtype="object"), Index([])]
)
def test_hash_pandas_empty_object(obj, index):
    # These are by definition the same with
    # or without the index as the data is empty.
    a = hash_pandas_object(obj, index=index)
    b = hash_pandas_object(obj, index=index)
    tm.assert_series_equal(a, b)


@pytest.mark.parametrize(
    "s1",
    [
        Series(["a", "b", "c", "d"]),
        Series([1000, 2000, 3000, 4000]),
        Series(pd.date_range(0, periods=4)),
    ],
)
@pytest.mark.parametrize("categorize", [True, False])
def test_categorical_consistency(s1, categorize):
    # see gh-15143
    #
    # Check that categoricals hash consistently with their values,
    # not their codes. This should work for categoricals of any dtype.
    s2 = s1.astype("category").cat.set_categories(s1)
    s3 = s2.cat.set_categories(list(reversed(s1)))

    # These should all hash identically.
    h1 = hash_pandas_object(s1, categorize=categorize)
    h2 = hash_pandas_object(s2, categorize=categorize)
    h3 = hash_pandas_object(s3, categorize=categorize)

    tm.assert_series_equal(h1, h2)
    tm.assert_series_equal(h1, h3)


def test_categorical_with_nan_consistency():
    c = pd.Categorical.from_codes(
        [-1, 0, 1, 2, 3, 4], categories=pd.date_range("2012-01-01", periods=5, name="B")
    )
    expected = hash_array(c, categorize=False)

    c = pd.Categorical.from_codes([-1, 0], categories=[pd.Timestamp("2012-01-01")])
    result = hash_array(c, categorize=False)

    assert result[0] in expected
    assert result[1] in expected


def test_pandas_errors():
    msg = "Unexpected type for hashing"
    with pytest.raises(TypeError, match=msg):
        hash_pandas_object(pd.Timestamp("20130101"))


def test_hash_keys():
    # Using different hash keys should produce
    # different hashes for the same data.
    #
    # This only matters for object dtypes.
    obj = Series(list("abc"))

    a = hash_pandas_object(obj, hash_key="9876543210123456")
    b = hash_pandas_object(obj, hash_key="9876543210123465")

    assert (a != b).all()
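

# A minimal sketch of the claim in the comment above: the hash key is only
# expected to matter for object dtypes, so numeric data should hash the same
# under different keys. The helper name is hypothetical and not collected by
# pytest.
def example_hash_key_ignored_for_numeric():
    numeric = Series([1, 2, 3])
    a = hash_pandas_object(numeric, index=False, hash_key="9876543210123456")
    b = hash_pandas_object(numeric, index=False, hash_key="9876543210123465")
    assert (a == b).all()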


def test_df_hash_keys():
    # DataFrame version of test_hash_keys above.
    # https://github.com/pandas-dev/pandas/issues/41404
    obj = DataFrame({"x": np.arange(3), "y": list("abc")})

    a = hash_pandas_object(obj, hash_key="9876543210123456")
    b = hash_pandas_object(obj, hash_key="9876543210123465")

    assert (a != b).all()


def test_df_encoding():
    # Check that DataFrame recognizes optional encoding.
    # https://github.com/pandas-dev/pandas/issues/41404
    # https://github.com/pandas-dev/pandas/pull/42049
    obj = DataFrame({"x": np.arange(3), "y": list("a+c")})

    a = hash_pandas_object(obj, encoding="utf8")
    b = hash_pandas_object(obj, encoding="utf7")

    # Note that the "+" is encoded as "+-" in utf-7,
    # so only the middle row hashes differently.
    assert a[0] == b[0]
    assert a[1] != b[1]
    assert a[2] == b[2]
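

# A minimal sketch of why only the middle row differs above: UTF-7 escapes
# "+" as "+-", while "a" and "c" encode identically in both encodings
# (assumption: Python's builtin "utf-7" codec behaves this way). The helper
# name is hypothetical.
def example_utf7_plus_escaping():
    assert "a".encode("utf-7") == "a".encode("utf-8") == b"a"
    assert "+".encode("utf-8") == b"+"
    assert "+".encode("utf-7") == b"+-"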


def test_invalid_key():
    # This only matters for object dtypes.
    msg = "key should be a 16-byte string encoded"
    with pytest.raises(ValueError, match=msg):
        hash_pandas_object(Series(list("abc")), hash_key="foo")


def test_already_encoded(index):
    # If already encoded, then ok.
    obj = Series(list("abc")).str.encode("utf8")
    a = hash_pandas_object(obj, index=index)
    b = hash_pandas_object(obj, index=index)
    tm.assert_series_equal(a, b)


def test_alternate_encoding(index):
    obj = Series(list("abc"))
    a = hash_pandas_object(obj, index=index)
    b = hash_pandas_object(obj, index=index)
    tm.assert_series_equal(a, b)


@pytest.mark.parametrize("l_exp", range(8))
@pytest.mark.parametrize("l_add", [0, 1])
def test_same_len_hash_collisions(l_exp, l_add):
    length = 2 ** (l_exp + 8) + l_add
    s = tm.rands_array(length, 2)

    result = hash_array(s, "utf8")
    assert not result[0] == result[1]


def test_hash_collisions():
    # Hash collisions are bad.
    #
    # https://github.com/pandas-dev/pandas/issues/14711#issuecomment-264885726
    hashes = [
        "Ingrid-9Z9fKIZmkO7i7Cn51Li34pJm44fgX6DYGBNj3VPlOH50m7HnBlPxfIwFMrcNJNMP6PSgLmwWnInciMWrCSAlLEvt7JkJl4IxiMrVbXSa8ZQoVaq5xoQPjltuJEfwdNlO6jo8qRRHvD8sBEBMQASrRa6TsdaPTPCBo3nwIBpE7YzzmyH0vMBhjQZLx1aCT7faSEx7PgFxQhHdKFWROcysamgy9iVj8DO2Fmwg1NNl93rIAqC3mdqfrCxrzfvIY8aJdzin2cHVzy3QUJxZgHvtUtOLxoqnUHsYbNTeq0xcLXpTZEZCxD4PGubIuCNf32c33M7HFsnjWSEjE2yVdWKhmSVodyF8hFYVmhYnMCztQnJrt3O8ZvVRXd5IKwlLexiSp4h888w7SzAIcKgc3g5XQJf6MlSMftDXm9lIsE1mJNiJEv6uY6pgvC3fUPhatlR5JPpVAHNSbSEE73MBzJrhCAbOLXQumyOXigZuPoME7QgJcBalliQol7YZ9",  # noqa: E501
        "Tim-b9MddTxOWW2AT1Py6vtVbZwGAmYCjbp89p8mxsiFoVX4FyDOF3wFiAkyQTUgwg9sVqVYOZo09Dh1AzhFHbgij52ylF0SEwgzjzHH8TGY8Lypart4p4onnDoDvVMBa0kdthVGKl6K0BDVGzyOXPXKpmnMF1H6rJzqHJ0HywfwS4XYpVwlAkoeNsiicHkJUFdUAhG229INzvIAiJuAHeJDUoyO4DCBqtoZ5TDend6TK7Y914yHlfH3g1WZu5LksKv68VQHJriWFYusW5e6ZZ6dKaMjTwEGuRgdT66iU5nqWTHRH8WSzpXoCFwGcTOwyuqPSe0fTe21DVtJn1FKj9F9nEnR9xOvJUO7E0piCIF4Ad9yAIDY4DBimpsTfKXCu1vdHpKYerzbndfuFe5AhfMduLYZJi5iAw8qKSwR5h86ttXV0Mc0QmXz8dsRvDgxjXSmupPxBggdlqUlC828hXiTPD7am0yETBV0F3bEtvPiNJfremszcV8NcqAoARMe",  # noqa: E501
    ]

    # These should be different.
    result1 = hash_array(np.asarray(hashes[0:1], dtype=object), "utf8")
    expected1 = np.array([14963968704024874985], dtype=np.uint64)
    tm.assert_numpy_array_equal(result1, expected1)

    result2 = hash_array(np.asarray(hashes[1:2], dtype=object), "utf8")
    expected2 = np.array([16428432627716348016], dtype=np.uint64)
    tm.assert_numpy_array_equal(result2, expected2)

    result = hash_array(np.asarray(hashes, dtype=object), "utf8")
    tm.assert_numpy_array_equal(result, np.concatenate([expected1, expected2], axis=0))


@pytest.mark.parametrize(
    "data, result_data",
    [
        [[tuple("1"), tuple("2")], [10345501319357378243, 8331063931016360761]],
        [[(1,), (2,)], [9408946347443669104, 3278256261030523334]],
    ],
)
def test_hash_with_tuple(data, result_data):
    # GH#28969 array containing a tuple raises on call to arr.astype(str)
    # apparently a numpy bug github.com/numpy/numpy/issues/9441
    df = DataFrame({"data": data})
    result = hash_pandas_object(df)
    expected = Series(result_data, dtype=np.uint64)
    tm.assert_series_equal(result, expected)


def test_hashable_tuple_args():
    # Require that the elements of such tuples are themselves hashable.
    df3 = DataFrame(
        {
            "data": [
                (
                    1,
                    [],
                ),
                (
                    2,
                    {},
                ),
            ]
        }
    )
    with pytest.raises(TypeError, match="unhashable type: 'list'"):
        hash_pandas_object(df3)


def test_hash_object_none_key():
    # https://github.com/pandas-dev/pandas/issues/30887
    result = pd.util.hash_pandas_object(Series(["a", "b"]), hash_key=None)
    expected = Series([4578374827886788867, 17338122309987883691], dtype="uint64")
    tm.assert_series_equal(result, expected)
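

# A minimal sketch, assuming pandas' documented default hash key
# "0123456789123456": passing hash_key=None is expected to fall back to that
# default, so the two calls below should agree. The helper name is
# hypothetical and not collected by pytest.
def example_none_key_uses_default():
    obj = Series(["a", "b"])
    tm.assert_series_equal(
        pd.util.hash_pandas_object(obj, hash_key=None),
        pd.util.hash_pandas_object(obj, hash_key="0123456789123456"),
    )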