test_concat.py

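"""Tests for pandas.concat: key/level handling, mixed Series/DataFrame inputs,
dtype preservation and upcasting, copy semantics, and assorted regression
cases referenced by GH issue number below."""
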
from collections import (
    abc,
    deque,
)
from datetime import datetime
from decimal import Decimal
from typing import Iterator
from warnings import (
    catch_warnings,
    simplefilter,
)

import numpy as np
import pytest

from pandas.errors import (
    InvalidIndexError,
    PerformanceWarning,
)
import pandas.util._test_decorators as td

import pandas as pd
from pandas import (
    DataFrame,
    Index,
    MultiIndex,
    PeriodIndex,
    Series,
    concat,
    date_range,
)
import pandas._testing as tm
from pandas.core.arrays import SparseArray
from pandas.tests.extension.decimal import to_decimal


class TestConcatenate:
    def test_append_concat(self):
        # GH#1815
        d1 = date_range("12/31/1990", "12/31/1999", freq="A-DEC")
        d2 = date_range("12/31/2000", "12/31/2009", freq="A-DEC")
        s1 = Series(np.random.randn(10), d1)
        s2 = Series(np.random.randn(10), d2)

        s1 = s1.to_period()
        s2 = s2.to_period()

        # drops index
        result = concat([s1, s2])
        assert isinstance(result.index, PeriodIndex)
        assert result.index[0] == s1.index[0]
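
    # copy=True should yield arrays that do not share memory with the inputs
    # (except under copy-on-write), while copy=False keeps views unless block
    # consolidation forces a copy; the assertions below check both modes.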
    def test_concat_copy(self, using_array_manager, using_copy_on_write):
        df = DataFrame(np.random.randn(4, 3))
        df2 = DataFrame(np.random.randint(0, 10, size=4).reshape(4, 1))
        df3 = DataFrame({5: "foo"}, index=range(4))

        # These are actual copies.
        result = concat([df, df2, df3], axis=1, copy=True)

        if not using_copy_on_write:
            for arr in result._mgr.arrays:
                assert arr.base is None
        else:
            for arr in result._mgr.arrays:
                assert arr.base is not None

        # These are the same.
        result = concat([df, df2, df3], axis=1, copy=False)

        for arr in result._mgr.arrays:
            if arr.dtype.kind == "f":
                assert arr.base is df._mgr.arrays[0].base
            elif arr.dtype.kind in ["i", "u"]:
                assert arr.base is df2._mgr.arrays[0].base
            elif arr.dtype == object:
                if using_array_manager:
                    # we get the same array object, which has no base
                    assert arr is df3._mgr.arrays[0]
                else:
                    assert arr.base is not None

        # Float block was consolidated.
        df4 = DataFrame(np.random.randn(4, 1))
        result = concat([df, df2, df3, df4], axis=1, copy=False)
        for arr in result._mgr.arrays:
            if arr.dtype.kind == "f":
                if using_array_manager or using_copy_on_write:
                    # this is a view on some array in either df or df4
                    assert any(
                        np.shares_memory(arr, other)
                        for other in df._mgr.arrays + df4._mgr.arrays
                    )
                else:
                    # the block was consolidated, so we got a copy anyway
                    assert arr.base is None
            elif arr.dtype.kind in ["i", "u"]:
                assert arr.base is df2._mgr.arrays[0].base
            elif arr.dtype == object:
                # this is a view on df3
                assert any(np.shares_memory(arr, other) for other in df3._mgr.arrays)

    def test_concat_with_group_keys(self):
        # axis=0
        df = DataFrame(np.random.randn(3, 4))
        df2 = DataFrame(np.random.randn(4, 4))

        result = concat([df, df2], keys=[0, 1])
        exp_index = MultiIndex.from_arrays(
            [[0, 0, 0, 1, 1, 1, 1], [0, 1, 2, 0, 1, 2, 3]]
        )
        expected = DataFrame(np.r_[df.values, df2.values], index=exp_index)
        tm.assert_frame_equal(result, expected)

        result = concat([df, df], keys=[0, 1])
        exp_index2 = MultiIndex.from_arrays([[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]])
        expected = DataFrame(np.r_[df.values, df.values], index=exp_index2)
        tm.assert_frame_equal(result, expected)

        # axis=1
        df = DataFrame(np.random.randn(4, 3))
        df2 = DataFrame(np.random.randn(4, 4))

        result = concat([df, df2], keys=[0, 1], axis=1)
        expected = DataFrame(np.c_[df.values, df2.values], columns=exp_index)
        tm.assert_frame_equal(result, expected)

        result = concat([df, df], keys=[0, 1], axis=1)
        expected = DataFrame(np.c_[df.values, df.values], columns=exp_index2)
        tm.assert_frame_equal(result, expected)
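
    # Explicitly passed `levels` are kept in the given order, including values
    # not used by any key ("zero" here), and the level is named via `names`.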
    def test_concat_keys_specific_levels(self):
        df = DataFrame(np.random.randn(10, 4))
        pieces = [df.iloc[:, [0, 1]], df.iloc[:, [2]], df.iloc[:, [3]]]
        level = ["three", "two", "one", "zero"]
        result = concat(
            pieces,
            axis=1,
            keys=["one", "two", "three"],
            levels=[level],
            names=["group_key"],
        )

        tm.assert_index_equal(result.columns.levels[0], Index(level, name="group_key"))
        tm.assert_index_equal(result.columns.levels[1], Index([0, 1, 2, 3]))

        assert result.columns.names == ["group_key", None]
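
    # concat accepts any Mapping, not just dict; the mapping keys become the
    # outer level of the result, and `keys` can select/reorder entries.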
    @pytest.mark.parametrize("mapping", ["mapping", "dict"])
    def test_concat_mapping(self, mapping, non_dict_mapping_subclass):
        constructor = dict if mapping == "dict" else non_dict_mapping_subclass
        frames = constructor(
            {
                "foo": DataFrame(np.random.randn(4, 3)),
                "bar": DataFrame(np.random.randn(4, 3)),
                "baz": DataFrame(np.random.randn(4, 3)),
                "qux": DataFrame(np.random.randn(4, 3)),
            }
        )

        sorted_keys = list(frames.keys())

        result = concat(frames)
        expected = concat([frames[k] for k in sorted_keys], keys=sorted_keys)
        tm.assert_frame_equal(result, expected)

        result = concat(frames, axis=1)
        expected = concat([frames[k] for k in sorted_keys], keys=sorted_keys, axis=1)
        tm.assert_frame_equal(result, expected)

        keys = ["baz", "foo", "bar"]
        result = concat(frames, keys=keys)
        expected = concat([frames[k] for k in keys], keys=keys)
        tm.assert_frame_equal(result, expected)
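
    # Tuple keys combined with explicit `levels`/`names` build a MultiIndex on
    # the concatenation axis; omitting `names` leaves the levels unnamed, and
    # omitting `levels` falls back to levels inferred from the keys.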
    def test_concat_keys_and_levels(self):
        df = DataFrame(np.random.randn(1, 3))
        df2 = DataFrame(np.random.randn(1, 4))

        levels = [["foo", "baz"], ["one", "two"]]
        names = ["first", "second"]
        result = concat(
            [df, df2, df, df2],
            keys=[("foo", "one"), ("foo", "two"), ("baz", "one"), ("baz", "two")],
            levels=levels,
            names=names,
        )
        expected = concat([df, df2, df, df2])
        exp_index = MultiIndex(
            levels=levels + [[0]],
            codes=[[0, 0, 1, 1], [0, 1, 0, 1], [0, 0, 0, 0]],
            names=names + [None],
        )
        expected.index = exp_index

        tm.assert_frame_equal(result, expected)

        # no names
        result = concat(
            [df, df2, df, df2],
            keys=[("foo", "one"), ("foo", "two"), ("baz", "one"), ("baz", "two")],
            levels=levels,
        )
        assert result.index.names == (None,) * 3

        # no levels
        result = concat(
            [df, df2, df, df2],
            keys=[("foo", "one"), ("foo", "two"), ("baz", "one"), ("baz", "two")],
            names=["first", "second"],
        )
        assert result.index.names == ("first", "second", None)
        tm.assert_index_equal(
            result.index.levels[0], Index(["baz", "foo"], name="first")
        )

    def test_concat_keys_levels_no_overlap(self):
        # GH #1406
        df = DataFrame(np.random.randn(1, 3), index=["a"])
        df2 = DataFrame(np.random.randn(1, 4), index=["b"])

        msg = "Values not found in passed level"
        with pytest.raises(ValueError, match=msg):
            concat([df, df], keys=["one", "two"], levels=[["foo", "bar", "baz"]])

        msg = "Key one not in level"
        with pytest.raises(ValueError, match=msg):
            concat([df, df2], keys=["one", "two"], levels=[["foo", "bar", "baz"]])

    def test_crossed_dtypes_weird_corner(self):
        columns = ["A", "B", "C", "D"]
        df1 = DataFrame(
            {
                "A": np.array([1, 2, 3, 4], dtype="f8"),
                "B": np.array([1, 2, 3, 4], dtype="i8"),
                "C": np.array([1, 2, 3, 4], dtype="f8"),
                "D": np.array([1, 2, 3, 4], dtype="i8"),
            },
            columns=columns,
        )

        df2 = DataFrame(
            {
                "A": np.array([1, 2, 3, 4], dtype="i8"),
                "B": np.array([1, 2, 3, 4], dtype="f8"),
                "C": np.array([1, 2, 3, 4], dtype="i8"),
                "D": np.array([1, 2, 3, 4], dtype="f8"),
            },
            columns=columns,
        )

        appended = concat([df1, df2], ignore_index=True)
        expected = DataFrame(
            np.concatenate([df1.values, df2.values], axis=0), columns=columns
        )
        tm.assert_frame_equal(appended, expected)

        df = DataFrame(np.random.randn(1, 3), index=["a"])
        df2 = DataFrame(np.random.randn(1, 4), index=["b"])
        result = concat([df, df2], keys=["one", "two"], names=["first", "second"])
        assert result.index.names == ("first", "second")

    def test_with_mixed_tuples(self, sort):
        # 10697
        # columns have mixed tuples, so handle properly
        df1 = DataFrame({"A": "foo", ("B", 1): "bar"}, index=range(2))
        df2 = DataFrame({"B": "foo", ("B", 1): "bar"}, index=range(2))

        # it works
        concat([df1, df2], sort=sort)

    def test_concat_mixed_objs(self):
        # concat mixed series/frames
        # GH#2385

        # axis 1
        index = date_range("01-Jan-2013", periods=10, freq="H")
        arr = np.arange(10, dtype="int64")
        s1 = Series(arr, index=index)
        s2 = Series(arr, index=index)
        df = DataFrame(arr.reshape(-1, 1), index=index)

        expected = DataFrame(
            np.repeat(arr, 2).reshape(-1, 2), index=index, columns=[0, 0]
        )
        result = concat([df, df], axis=1)
        tm.assert_frame_equal(result, expected)

        expected = DataFrame(
            np.repeat(arr, 2).reshape(-1, 2), index=index, columns=[0, 1]
        )
        result = concat([s1, s2], axis=1)
        tm.assert_frame_equal(result, expected)

        expected = DataFrame(
            np.repeat(arr, 3).reshape(-1, 3), index=index, columns=[0, 1, 2]
        )
        result = concat([s1, s2, s1], axis=1)
        tm.assert_frame_equal(result, expected)

        expected = DataFrame(
            np.repeat(arr, 5).reshape(-1, 5), index=index, columns=[0, 0, 1, 2, 3]
        )
        result = concat([s1, df, s2, s2, s1], axis=1)
        tm.assert_frame_equal(result, expected)

        # with names
        s1.name = "foo"
        expected = DataFrame(
            np.repeat(arr, 3).reshape(-1, 3), index=index, columns=["foo", 0, 0]
        )
        result = concat([s1, df, s2], axis=1)
        tm.assert_frame_equal(result, expected)

        s2.name = "bar"
        expected = DataFrame(
            np.repeat(arr, 3).reshape(-1, 3), index=index, columns=["foo", 0, "bar"]
        )
        result = concat([s1, df, s2], axis=1)
        tm.assert_frame_equal(result, expected)

        # ignore index
        expected = DataFrame(
            np.repeat(arr, 3).reshape(-1, 3), index=index, columns=[0, 1, 2]
        )
        result = concat([s1, df, s2], axis=1, ignore_index=True)
        tm.assert_frame_equal(result, expected)

        # axis 0
        expected = DataFrame(
            np.tile(arr, 3).reshape(-1, 1), index=index.tolist() * 3, columns=[0]
        )
        result = concat([s1, df, s2])
        tm.assert_frame_equal(result, expected)

        expected = DataFrame(np.tile(arr, 3).reshape(-1, 1), columns=[0])
        result = concat([s1, df, s2], ignore_index=True)
        tm.assert_frame_equal(result, expected)

    def test_dtype_coercion(self):
        # 12411
        df = DataFrame({"date": [pd.Timestamp("20130101").tz_localize("UTC"), pd.NaT]})

        result = concat([df.iloc[[0]], df.iloc[[1]]])
        tm.assert_series_equal(result.dtypes, df.dtypes)

        # 12045
        df = DataFrame({"date": [datetime(2012, 1, 1), datetime(1012, 1, 2)]})

        result = concat([df.iloc[[0]], df.iloc[[1]]])
        tm.assert_series_equal(result.dtypes, df.dtypes)

        # 11594
        df = DataFrame({"text": ["some words"] + [None] * 9})

        result = concat([df.iloc[[0]], df.iloc[[1]]])
        tm.assert_series_equal(result.dtypes, df.dtypes)
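
    # A single frame passed with a key still gets the key prepended as an
    # outer index level, matching the first block of a multi-frame concat.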
    def test_concat_single_with_key(self):
        df = DataFrame(np.random.randn(10, 4))

        result = concat([df], keys=["foo"])
        expected = concat([df, df], keys=["foo", "bar"])
        tm.assert_frame_equal(result, expected[:10])

    def test_concat_no_items_raises(self):
        with pytest.raises(ValueError, match="No objects to concatenate"):
            concat([])
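
    # None entries are silently dropped from the list of objects, but a list
    # containing only None raises.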
    def test_concat_exclude_none(self):
        df = DataFrame(np.random.randn(10, 4))

        pieces = [df[:5], None, None, df[5:]]
        result = concat(pieces)
        tm.assert_frame_equal(result, df)
        with pytest.raises(ValueError, match="All objects passed were None"):
            concat([None, None])

    def test_concat_keys_with_none(self):
        # #1649
        df0 = DataFrame([[10, 20, 30], [10, 20, 30], [10, 20, 30]])

        result = concat({"a": None, "b": df0, "c": df0[:2], "d": df0[:1], "e": df0})
        expected = concat({"b": df0, "c": df0[:2], "d": df0[:1], "e": df0})
        tm.assert_frame_equal(result, expected)

        result = concat(
            [None, df0, df0[:2], df0[:1], df0], keys=["a", "b", "c", "d", "e"]
        )
        expected = concat([df0, df0[:2], df0[:1], df0], keys=["b", "c", "d", "e"])
        tm.assert_frame_equal(result, expected)

    def test_concat_bug_1719(self):
        ts1 = tm.makeTimeSeries()
        ts2 = tm.makeTimeSeries()[::2]

        # to join with union
        # these two are of different length!
        left = concat([ts1, ts2], join="outer", axis=1)
        right = concat([ts2, ts1], join="outer", axis=1)

        assert len(left) == len(right)

    def test_concat_bug_2972(self):
        ts0 = Series(np.zeros(5))
        ts1 = Series(np.ones(5))
        ts0.name = ts1.name = "same name"
        result = concat([ts0, ts1], axis=1)

        expected = DataFrame({0: ts0, 1: ts1})
        expected.columns = ["same name", "same name"]
        tm.assert_frame_equal(result, expected)

    def test_concat_bug_3602(self):
        # GH 3602, duplicate columns
        df1 = DataFrame(
            {
                "firmNo": [0, 0, 0, 0],
                "prc": [6, 6, 6, 6],
                "stringvar": ["rrr", "rrr", "rrr", "rrr"],
            }
        )
        df2 = DataFrame(
            {"C": [9, 10, 11, 12], "misc": [1, 2, 3, 4], "prc": [6, 6, 6, 6]}
        )
        expected = DataFrame(
            [
                [0, 6, "rrr", 9, 1, 6],
                [0, 6, "rrr", 10, 2, 6],
                [0, 6, "rrr", 11, 3, 6],
                [0, 6, "rrr", 12, 4, 6],
            ]
        )
        expected.columns = ["firmNo", "prc", "stringvar", "C", "misc", "prc"]

        result = concat([df1, df2], axis=1)
        tm.assert_frame_equal(result, expected)

    def test_concat_iterables(self):
        # GH8645 check concat works with tuples, list, generators, and weird
        # stuff like deque and custom iterables
        df1 = DataFrame([1, 2, 3])
        df2 = DataFrame([4, 5, 6])
        expected = DataFrame([1, 2, 3, 4, 5, 6])
        tm.assert_frame_equal(concat((df1, df2), ignore_index=True), expected)
        tm.assert_frame_equal(concat([df1, df2], ignore_index=True), expected)
        tm.assert_frame_equal(
            concat((df for df in (df1, df2)), ignore_index=True), expected
        )
        tm.assert_frame_equal(concat(deque((df1, df2)), ignore_index=True), expected)

        class CustomIterator1:
            def __len__(self) -> int:
                return 2

            def __getitem__(self, index):
                try:
                    return {0: df1, 1: df2}[index]
                except KeyError as err:
                    raise IndexError from err

        tm.assert_frame_equal(concat(CustomIterator1(), ignore_index=True), expected)

        class CustomIterator2(abc.Iterable):
            def __iter__(self) -> Iterator:
                yield df1
                yield df2

        tm.assert_frame_equal(concat(CustomIterator2(), ignore_index=True), expected)

    def test_concat_order(self):
        # GH 17344, GH#47331
        dfs = [DataFrame(index=range(3), columns=["a", 1, None])]
        dfs += [DataFrame(index=range(3), columns=[None, 1, "a"]) for _ in range(100)]

        result = concat(dfs, sort=True).columns
        expected = Index([1, "a", None])
        tm.assert_index_equal(result, expected)
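
    # Mixing incompatible extension dtypes (Int64 and decimal) falls back to
    # object dtype rather than raising.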
    def test_concat_different_extension_dtypes_upcasts(self):
        a = Series(pd.array([1, 2], dtype="Int64"))
        b = Series(to_decimal([1, 2]))

        result = concat([a, b], ignore_index=True)
        expected = Series([1, 2, Decimal(1), Decimal(2)], dtype=object)
        tm.assert_series_equal(result, expected)

    def test_concat_ordered_dict(self):
        # GH 21510
        expected = concat(
            [Series(range(3)), Series(range(4))], keys=["First", "Another"]
        )
        result = concat({"First": Series(range(3)), "Another": Series(range(4))})
        tm.assert_series_equal(result, expected)

    def test_concat_duplicate_indices_raise(self):
        # GH 45888: test raise for concat DataFrames with duplicate indices
        # https://github.com/pandas-dev/pandas/issues/36263
        df1 = DataFrame(np.random.randn(5), index=[0, 1, 2, 3, 3], columns=["a"])
        df2 = DataFrame(np.random.randn(5), index=[0, 1, 2, 2, 4], columns=["b"])
        msg = "Reindexing only valid with uniquely valued Index objects"
        with pytest.raises(InvalidIndexError, match=msg):
            concat([df1, df2], axis=1)


@pytest.mark.parametrize("dt", np.sctypes["float"])
def test_concat_no_unnecessary_upcast(dt, frame_or_series):
    # GH 13247
    dims = frame_or_series(dtype=object).ndim

    dfs = [
        frame_or_series(np.array([1], dtype=dt, ndmin=dims)),
        frame_or_series(np.array([np.nan], dtype=dt, ndmin=dims)),
        frame_or_series(np.array([5], dtype=dt, ndmin=dims)),
    ]
    x = concat(dfs)
    assert x.values.dtype == dt
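

# Concatenating integer data with a piece containing NaN upcasts the result
# to float64.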
@pytest.mark.parametrize("pdt", [Series, DataFrame])
@pytest.mark.parametrize("dt", np.sctypes["int"])
def test_concat_will_upcast(dt, pdt):
    with catch_warnings(record=True):
        dims = pdt().ndim
        dfs = [
            pdt(np.array([1], dtype=dt, ndmin=dims)),
            pdt(np.array([np.nan], ndmin=dims)),
            pdt(np.array([5], dtype=dt, ndmin=dims)),
        ]
        x = concat(dfs)
        assert x.values.dtype == "float64"


def test_concat_empty_and_non_empty_frame_regression():
    # GH 18178 regression test
    df1 = DataFrame({"foo": [1]})
    df2 = DataFrame({"foo": []})
    expected = DataFrame({"foo": [1.0]})
    result = concat([df1, df2])
    tm.assert_frame_equal(result, expected)


def test_concat_sparse():
    # GH 23557
    a = Series(SparseArray([0, 1, 2]))
    expected = DataFrame(data=[[0, 0], [1, 1], [2, 2]]).astype(
        pd.SparseDtype(np.int64, 0)
    )
    result = concat([a, a], axis=1)
    tm.assert_frame_equal(result, expected)


def test_concat_dense_sparse():
    # GH 30668
    dtype = pd.SparseDtype(np.float64, None)
    a = Series(pd.arrays.SparseArray([1, None]), dtype=dtype)
    b = Series([1], dtype=float)
    expected = Series(data=[1, None, 1], index=[0, 1, 0]).astype(dtype)
    result = concat([a, b], axis=0)
    tm.assert_series_equal(result, expected)


@pytest.mark.parametrize("keys", [["e", "f", "f"], ["f", "e", "f"]])
def test_duplicate_keys(keys):
    # GH 33654
    df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
    s1 = Series([7, 8, 9], name="c")
    s2 = Series([10, 11, 12], name="d")
    result = concat([df, s1, s2], axis=1, keys=keys)
    expected_values = [[1, 4, 7, 10], [2, 5, 8, 11], [3, 6, 9, 12]]
    expected_columns = MultiIndex.from_tuples(
        [(keys[0], "a"), (keys[0], "b"), (keys[1], "c"), (keys[2], "d")]
    )
    expected = DataFrame(expected_values, columns=expected_columns)
    tm.assert_frame_equal(result, expected)


def test_duplicate_keys_same_frame():
    # GH 43595
    keys = ["e", "e"]
    df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
    result = concat([df, df], axis=1, keys=keys)
    expected_values = [[1, 4, 1, 4], [2, 5, 2, 5], [3, 6, 3, 6]]
    expected_columns = MultiIndex.from_tuples(
        [(keys[0], "a"), (keys[0], "b"), (keys[1], "a"), (keys[1], "b")]
    )
    expected = DataFrame(expected_values, columns=expected_columns)
    with catch_warnings():
        # result.columns not sorted, resulting in performance warning
        simplefilter("ignore", PerformanceWarning)
        tm.assert_frame_equal(result, expected)


@pytest.mark.parametrize(
    "obj",
    [
        tm.SubclassedDataFrame({"A": np.arange(0, 10)}),
        tm.SubclassedSeries(np.arange(0, 10), name="A"),
    ],
)
def test_concat_preserves_subclass(obj):
    # GH28330 -- preserve subclass
    result = concat([obj, obj])
    assert isinstance(result, type(obj))


def test_concat_frame_axis0_extension_dtypes():
    # preserve extension dtype (through common_dtype mechanism)
    df1 = DataFrame({"a": pd.array([1, 2, 3], dtype="Int64")})
    df2 = DataFrame({"a": np.array([4, 5, 6])})

    result = concat([df1, df2], ignore_index=True)
    expected = DataFrame({"a": [1, 2, 3, 4, 5, 6]}, dtype="Int64")
    tm.assert_frame_equal(result, expected)

    result = concat([df2, df1], ignore_index=True)
    expected = DataFrame({"a": [4, 5, 6, 1, 2, 3]}, dtype="Int64")
    tm.assert_frame_equal(result, expected)


def test_concat_preserves_extension_int64_dtype():
    # GH 24768
    df_a = DataFrame({"a": [-1]}, dtype="Int64")
    df_b = DataFrame({"b": [1]}, dtype="Int64")
    result = concat([df_a, df_b], ignore_index=True)
    expected = DataFrame({"a": [-1, None], "b": [None, 1]}, dtype="Int64")
    tm.assert_frame_equal(result, expected)


@pytest.mark.parametrize(
    "dtype1,dtype2,expected_dtype",
    [
        ("bool", "bool", "bool"),
        ("boolean", "bool", "boolean"),
        ("bool", "boolean", "boolean"),
        ("boolean", "boolean", "boolean"),
    ],
)
def test_concat_bool_types(dtype1, dtype2, expected_dtype):
    # GH 42800
    ser1 = Series([True, False], dtype=dtype1)
    ser2 = Series([False, True], dtype=dtype2)
    result = concat([ser1, ser2], ignore_index=True)
    expected = Series([True, False, False, True], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)


@pytest.mark.parametrize(
    ("keys", "integrity"),
    [
        (["red"] * 3, True),
        (["red"] * 3, False),
        (["red", "blue", "red"], False),
        (["red", "blue", "red"], True),
    ],
)
def test_concat_repeated_keys(keys, integrity):
    # GH: 20816
    series_list = [Series({"a": 1}), Series({"b": 2}), Series({"c": 3})]
    result = concat(series_list, keys=keys, verify_integrity=integrity)
    tuples = list(zip(keys, ["a", "b", "c"]))
    expected = Series([1, 2, 3], index=MultiIndex.from_tuples(tuples))
    tm.assert_series_equal(result, expected)


def test_concat_null_object_with_dti():
    # GH#40841
    dti = pd.DatetimeIndex(
        ["2021-04-08 21:21:14+00:00"], dtype="datetime64[ns, UTC]", name="Time (UTC)"
    )
    right = DataFrame(data={"C": [0.5274]}, index=dti)

    idx = Index([None], dtype="object", name="Maybe Time (UTC)")
    left = DataFrame(data={"A": [None], "B": [np.nan]}, index=idx)

    result = concat([left, right], axis="columns")

    exp_index = Index([None, dti[0]], dtype=object)
    expected = DataFrame(
        {"A": [None, None], "B": [np.nan, np.nan], "C": [np.nan, 0.5274]},
        index=exp_index,
    )
    tm.assert_frame_equal(result, expected)


def test_concat_multiindex_with_empty_rangeindex():
    # GH#41234
    mi = MultiIndex.from_tuples([("B", 1), ("C", 1)])
    df1 = DataFrame([[1, 2]], columns=mi)
    df2 = DataFrame(index=[1], columns=pd.RangeIndex(0))

    result = concat([df1, df2])
    expected = DataFrame([[1, 2], [np.nan, np.nan]], columns=mi)
    tm.assert_frame_equal(result, expected)


@pytest.mark.parametrize(
    "data",
    [
        Series(data=[1, 2]),
        DataFrame(
            data={
                "col1": [1, 2],
            }
        ),
        DataFrame(dtype=float),
        Series(dtype=float),
    ],
)
def test_concat_drop_attrs(data):
    # GH#41828
    df1 = data.copy()
    df1.attrs = {1: 1}
    df2 = data.copy()
    df2.attrs = {1: 2}
    df = concat([df1, df2])
    assert len(df.attrs) == 0


@pytest.mark.parametrize(
    "data",
    [
        Series(data=[1, 2]),
        DataFrame(
            data={
                "col1": [1, 2],
            }
        ),
        DataFrame(dtype=float),
        Series(dtype=float),
    ],
)
def test_concat_retain_attrs(data):
    # GH#41828
    df1 = data.copy()
    df1.attrs = {1: 1}
    df2 = data.copy()
    df2.attrs = {1: 1}
    df = concat([df1, df2])
    assert df.attrs[1] == 1


@td.skip_array_manager_invalid_test
@pytest.mark.parametrize("df_dtype", ["float64", "int64", "datetime64[ns]"])
@pytest.mark.parametrize("empty_dtype", [None, "float64", "object"])
def test_concat_ignore_empty_object_float(empty_dtype, df_dtype):
    # https://github.com/pandas-dev/pandas/issues/45637
    df = DataFrame({"foo": [1, 2], "bar": [1, 2]}, dtype=df_dtype)
    empty = DataFrame(columns=["foo", "bar"], dtype=empty_dtype)
    result = concat([empty, df])
    expected = df
    if df_dtype == "int64":
        # TODO what exact behaviour do we want for integer eventually?
        if empty_dtype == "float64":
            expected = df.astype("float64")
        else:
            expected = df.astype("object")
    tm.assert_frame_equal(result, expected)


@td.skip_array_manager_invalid_test
@pytest.mark.parametrize("df_dtype", ["float64", "int64", "datetime64[ns]"])
@pytest.mark.parametrize("empty_dtype", [None, "float64", "object"])
def test_concat_ignore_all_na_object_float(empty_dtype, df_dtype):
    df = DataFrame({"foo": [1, 2], "bar": [1, 2]}, dtype=df_dtype)
    empty = DataFrame({"foo": [np.nan], "bar": [np.nan]}, dtype=empty_dtype)
    result = concat([empty, df], ignore_index=True)

    if df_dtype == "int64":
        # TODO what exact behaviour do we want for integer eventually?
        if empty_dtype == "object":
            df_dtype = "object"
        else:
            df_dtype = "float64"

    expected = DataFrame({"foo": [None, 1, 2], "bar": [None, 1, 2]}, dtype=df_dtype)
    tm.assert_frame_equal(result, expected)


@td.skip_array_manager_invalid_test
def test_concat_ignore_empty_from_reindex():
    # https://github.com/pandas-dev/pandas/pull/43507#issuecomment-920375856
    df1 = DataFrame({"a": [1], "b": [pd.Timestamp("2012-01-01")]})
    df2 = DataFrame({"a": [2]})

    result = concat([df1, df2.reindex(columns=df1.columns)], ignore_index=True)
    expected = DataFrame({"a": [1, 2], "b": [pd.Timestamp("2012-01-01"), pd.NaT]})
    tm.assert_frame_equal(result, expected)