# test_duplicates.py

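"""
Tests for duplicate handling in MultiIndex: unique (flat and per-level),
is_unique / has_duplicates, duplicated(keep=...), and drop_duplicates.
"""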
from itertools import product

import numpy as np
import pytest

from pandas._libs import hashtable
from pandas import (
    NA,
    DatetimeIndex,
    MultiIndex,
    Series,
)
import pandas._testing as tm


@pytest.mark.parametrize("names", [None, ["first", "second"]])
def test_unique(names):
    mi = MultiIndex.from_arrays([[1, 2, 1, 2], [1, 1, 1, 2]], names=names)
    res = mi.unique()
    exp = MultiIndex.from_arrays([[1, 2, 2], [1, 1, 2]], names=mi.names)
    tm.assert_index_equal(res, exp)

    mi = MultiIndex.from_arrays([list("aaaa"), list("abab")], names=names)
    res = mi.unique()
    exp = MultiIndex.from_arrays([list("aa"), list("ab")], names=mi.names)
    tm.assert_index_equal(res, exp)

    mi = MultiIndex.from_arrays([list("aaaa"), list("aaaa")], names=names)
    res = mi.unique()
    exp = MultiIndex.from_arrays([["a"], ["a"]], names=mi.names)
    tm.assert_index_equal(res, exp)

    # GH #20568 - empty MI
    mi = MultiIndex.from_arrays([[], []], names=names)
    res = mi.unique()
    tm.assert_index_equal(mi, res)


def test_unique_datetimelike():
    idx1 = DatetimeIndex(
        ["2015-01-01", "2015-01-01", "2015-01-01", "2015-01-01", "NaT", "NaT"]
    )
    idx2 = DatetimeIndex(
        ["2015-01-01", "2015-01-01", "2015-01-02", "2015-01-02", "NaT", "2015-01-01"],
        tz="Asia/Tokyo",
    )
    result = MultiIndex.from_arrays([idx1, idx2]).unique()

    eidx1 = DatetimeIndex(["2015-01-01", "2015-01-01", "NaT", "NaT"])
    eidx2 = DatetimeIndex(
        ["2015-01-01", "2015-01-02", "NaT", "2015-01-01"], tz="Asia/Tokyo"
    )
    exp = MultiIndex.from_arrays([eidx1, eidx2])
    tm.assert_index_equal(result, exp)


@pytest.mark.parametrize("level", [0, "first", 1, "second"])
def test_unique_level(idx, level):
    # GH #17896 - with level= argument
    result = idx.unique(level=level)
    expected = idx.get_level_values(level).unique()
    tm.assert_index_equal(result, expected)

    # With already unique level
    mi = MultiIndex.from_arrays([[1, 3, 2, 4], [1, 3, 2, 5]], names=["first", "second"])
    result = mi.unique(level=level)
    expected = mi.get_level_values(level)
    tm.assert_index_equal(result, expected)

    # With empty MI
    mi = MultiIndex.from_arrays([[], []], names=["first", "second"])
    result = mi.unique(level=level)
    expected = mi.get_level_values(level)
    tm.assert_index_equal(result, expected)


def test_duplicate_multiindex_codes():
    # GH 17464
    # Make sure that a MultiIndex with duplicate levels throws a ValueError.
    # The character class in the pattern matches the repr of the offending
    # level values without pinning down their exact number.
    msg = r"Level values must be unique: \[[A', ]+\] on level 0"
    with pytest.raises(ValueError, match=msg):
        mi = MultiIndex([["A"] * 10, range(10)], [[0] * 10, range(10)])

    # And that using set_levels with duplicate levels fails
    mi = MultiIndex.from_arrays([["A", "A", "B", "B", "B"], [1, 2, 1, 2, 3]])
    msg = r"Level values must be unique: \[[AB', ]+\] on level 0"
    with pytest.raises(ValueError, match=msg):
        mi.set_levels([["A", "B", "A", "A", "B"], [2, 1, 3, -2, 5]])


@pytest.mark.parametrize("names", [["a", "b", "a"], [1, 1, 2], [1, "a", 1]])
def test_duplicate_level_names(names):
    # GH18872, GH19029
    mi = MultiIndex.from_product([[0, 1]] * 3, names=names)
    assert mi.names == names

    # With .rename()
    mi = MultiIndex.from_product([[0, 1]] * 3)
    mi = mi.rename(names)
    assert mi.names == names

    # With .rename(., level=)
    mi.rename(names[1], level=1, inplace=True)
    mi = mi.rename([names[0], names[2]], level=[0, 2])
    assert mi.names == names


def test_duplicate_meta_data():
    # GH 10115
    mi = MultiIndex(
        levels=[[0, 1], [0, 1, 2]], codes=[[0, 0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 0, 1, 2]]
    )

    for idx in [
        mi,
        mi.set_names([None, None]),
        mi.set_names([None, "Num"]),
        mi.set_names(["Upper", "Num"]),
    ]:
        assert idx.has_duplicates
        assert idx.drop_duplicates().names == idx.names


def test_has_duplicates(idx, idx_dup):
    # see fixtures
    assert idx.is_unique is True
    assert idx.has_duplicates is False
    assert idx_dup.is_unique is False
    assert idx_dup.has_duplicates is True

    mi = MultiIndex(
        levels=[[0, 1], [0, 1, 2]], codes=[[0, 0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 0, 1, 2]]
    )
    assert mi.is_unique is False
    assert mi.has_duplicates is True

    # single instance of NaN (a code of -1 marks a missing value)
    mi_nan = MultiIndex(
        levels=[["a", "b"], [0, 1]], codes=[[-1, 0, 0, 1, 1], [-1, 0, 1, 0, 1]]
    )
    assert mi_nan.is_unique is True
    assert mi_nan.has_duplicates is False

    # multiple instances of NaN
    mi_nan_dup = MultiIndex(
        levels=[["a", "b"], [0, 1]], codes=[[-1, -1, 0, 0, 1, 1], [-1, -1, 0, 1, 0, 1]]
    )
    assert mi_nan_dup.is_unique is False
    assert mi_nan_dup.has_duplicates is True


def test_has_duplicates_from_tuples():
    # GH 9075
    t = [
        ("x", "out", "z", 5, "y", "in", "z", 169),
        ("x", "out", "z", 7, "y", "in", "z", 119),
        ("x", "out", "z", 9, "y", "in", "z", 135),
        ("x", "out", "z", 13, "y", "in", "z", 145),
        ("x", "out", "z", 14, "y", "in", "z", 158),
        ("x", "out", "z", 16, "y", "in", "z", 122),
        ("x", "out", "z", 17, "y", "in", "z", 160),
        ("x", "out", "z", 18, "y", "in", "z", 180),
        ("x", "out", "z", 20, "y", "in", "z", 143),
        ("x", "out", "z", 21, "y", "in", "z", 128),
        ("x", "out", "z", 22, "y", "in", "z", 129),
        ("x", "out", "z", 25, "y", "in", "z", 111),
        ("x", "out", "z", 28, "y", "in", "z", 114),
        ("x", "out", "z", 29, "y", "in", "z", 121),
        ("x", "out", "z", 31, "y", "in", "z", 126),
        ("x", "out", "z", 32, "y", "in", "z", 155),
        ("x", "out", "z", 33, "y", "in", "z", 123),
        ("x", "out", "z", 12, "y", "in", "z", 144),
    ]

    mi = MultiIndex.from_tuples(t)
    assert not mi.has_duplicates


@pytest.mark.parametrize("nlevels", [4, 8])
@pytest.mark.parametrize("with_nulls", [True, False])
def test_has_duplicates_overflow(nlevels, with_nulls):
    # handle int64 overflow if possible
    # no overflow with 4
    # overflow possible with 8
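    # (Back-of-envelope arithmetic, assuming the combined group code scales
    # roughly with the product of the level cardinalities: with the extra
    # two-valued level, 2 * 500**4 ~= 1.25e11 fits easily in int64, while
    # 2 * 500**8 ~= 7.8e21 exceeds 2**63 - 1 ~= 9.22e18, so deduplication
    # must take a different path.)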
    codes = np.tile(np.arange(500), 2)
    level = np.arange(500)

    if with_nulls:  # inject some null values
        codes[500] = -1  # common nan value
        codes = [codes.copy() for i in range(nlevels)]
        for i in range(nlevels):
            codes[i][500 + i - nlevels // 2] = -1

        codes += [np.array([-1, 1]).repeat(500)]
    else:
        codes = [codes] * nlevels + [np.arange(2).repeat(500)]

    levels = [level] * nlevels + [[0, 1]]

    # no dups
    mi = MultiIndex(levels=levels, codes=codes)
    assert not mi.has_duplicates

    # with a dup
    if with_nulls:

        def f(a):
            return np.insert(a, 1000, a[0])

        codes = list(map(f, codes))
        mi = MultiIndex(levels=levels, codes=codes)
    else:
        values = mi.values.tolist()
        mi = MultiIndex.from_tuples(values + [values[0]])

    assert mi.has_duplicates
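

# Reference semantics for `keep`, as documented for Series.duplicated:
# "first" marks later occurrences as duplicates, "last" marks earlier ones,
# and keep=False marks every member of a duplicated group.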
@pytest.mark.parametrize(
    "keep, expected",
    [
        ("first", np.array([False, False, False, True, True, False])),
        ("last", np.array([False, True, True, False, False, False])),
        (False, np.array([False, True, True, True, True, False])),
    ],
)
def test_duplicated(idx_dup, keep, expected):
    result = idx_dup.duplicated(keep=keep)
    tm.assert_numpy_array_equal(result, expected)


@pytest.mark.arm_slow
def test_duplicated_large(keep):
    # GH 9125
    # `keep` comes from the shared pytest fixture in pandas' conftest,
    # parametrized over "first", "last", and False
    n, k = 200, 5000
    levels = [np.arange(n), tm.makeStringIndex(n), 1000 + np.arange(n)]
    codes = [np.random.choice(n, k * n) for lev in levels]
    mi = MultiIndex(levels=levels, codes=codes)

    result = mi.duplicated(keep=keep)
    expected = hashtable.duplicated(mi.values, keep=keep)
    tm.assert_numpy_array_equal(result, expected)


def test_duplicated2():
    # TODO: more informative test name
    # GH5873
    for a in [101, 102]:
        mi = MultiIndex.from_arrays([[101, a], [3.5, np.nan]])
        assert not mi.has_duplicates

        tm.assert_numpy_array_equal(mi.duplicated(), np.zeros(2, dtype="bool"))

    for n in range(1, 6):  # 1st level shape
        for m in range(1, 5):  # 2nd level shape
            # all possible unique combinations, including nan
            # (each level takes codes -1..n-1, i.e. n + 1 distinct values
            # counting the -1 NaN code, hence (n + 1) * (m + 1) pairs)
            codes = product(range(-1, n), range(-1, m))
            mi = MultiIndex(
                levels=[list("abcde")[:n], list("WXYZ")[:m]],
                codes=np.random.permutation(list(codes)).T,
            )
            assert len(mi) == (n + 1) * (m + 1)
            assert not mi.has_duplicates

            tm.assert_numpy_array_equal(
                mi.duplicated(), np.zeros(len(mi), dtype="bool")
            )


def test_duplicated_drop_duplicates():
    # GH#4060
    idx = MultiIndex.from_arrays(([1, 2, 3, 1, 2, 3], [1, 1, 1, 1, 2, 2]))

    expected = np.array([False, False, False, True, False, False], dtype=bool)
    duplicated = idx.duplicated()
    tm.assert_numpy_array_equal(duplicated, expected)
    assert duplicated.dtype == bool
    expected = MultiIndex.from_arrays(([1, 2, 3, 2, 3], [1, 1, 1, 2, 2]))
    tm.assert_index_equal(idx.drop_duplicates(), expected)

    expected = np.array([True, False, False, False, False, False])
    duplicated = idx.duplicated(keep="last")
    tm.assert_numpy_array_equal(duplicated, expected)
    assert duplicated.dtype == bool
    expected = MultiIndex.from_arrays(([2, 3, 1, 2, 3], [1, 1, 1, 2, 2]))
    tm.assert_index_equal(idx.drop_duplicates(keep="last"), expected)

    expected = np.array([True, False, False, True, False, False])
    duplicated = idx.duplicated(keep=False)
    tm.assert_numpy_array_equal(duplicated, expected)
    assert duplicated.dtype == bool
    expected = MultiIndex.from_arrays(([2, 3, 2, 3], [1, 1, 2, 2]))
    tm.assert_index_equal(idx.drop_duplicates(keep=False), expected)


@pytest.mark.parametrize(
    "dtype",
    [
        np.complex64,
        np.complex128,
    ],
)
def test_duplicated_series_complex_numbers(dtype):
    # GH 17927
    # note that np.nan cast to complex is nan+0j, which is distinct from
    # nan+nan*1j, so only the repeated nan+nan*1j entry is a duplicate
    expected = Series(
        [False, False, False, True, False, False, False, True, False, True],
        dtype=bool,
    )
    result = Series(
        [
            np.nan + np.nan * 1j,
            0,
            1j,
            1j,
            1,
            1 + 1j,
            1 + 2j,
            1 + 1j,
            np.nan,
            np.nan + np.nan * 1j,
        ],
        dtype=dtype,
    ).duplicated()
    tm.assert_series_equal(result, expected)


def test_midx_unique_ea_dtype():
    # GH#48335
    vals_a = Series([1, 2, NA, NA], dtype="Int64")
    vals_b = np.array([1, 2, 3, 3])
    midx = MultiIndex.from_arrays([vals_a, vals_b], names=["a", "b"])
    result = midx.unique()

    exp_vals_a = Series([1, 2, NA], dtype="Int64")
    exp_vals_b = np.array([1, 2, 3])
    expected = MultiIndex.from_arrays([exp_vals_a, exp_vals_b], names=["a", "b"])
    tm.assert_index_equal(result, expected)