# test_block_internals.py
from datetime import (
    datetime,
    timedelta,
)
import itertools

import numpy as np
import pytest

from pandas.errors import PerformanceWarning
import pandas.util._test_decorators as td

import pandas as pd
from pandas import (
    Categorical,
    DataFrame,
    Series,
    Timestamp,
    date_range,
    option_context,
)
import pandas._testing as tm
from pandas.core.internals import (
    NumericBlock,
    ObjectBlock,
)
  24. # Segregated collection of methods that require the BlockManager internal data
  25. # structure
  26. # TODO(ArrayManager) check which of those tests need to be rewritten to test the
  27. # equivalent for ArrayManager
  28. pytestmark = td.skip_array_manager_invalid_test
  29. class TestDataFrameBlockInternals:
  30. def test_setitem_invalidates_datetime_index_freq(self):
  31. # GH#24096 altering a datetime64tz column inplace invalidates the
  32. # `freq` attribute on the underlying DatetimeIndex
  33. dti = date_range("20130101", periods=3, tz="US/Eastern")
  34. ts = dti[1]
  35. df = DataFrame({"B": dti})
  36. assert df["B"]._values.freq is None
  37. df.iloc[1, 0] = pd.NaT
  38. assert df["B"]._values.freq is None
  39. # check that the DatetimeIndex was not altered in place
  40. assert dti.freq == "D"
  41. assert dti[1] == ts
  42. def test_cast_internals(self, float_frame):
  43. casted = DataFrame(float_frame._mgr, dtype=int)
  44. expected = DataFrame(float_frame._series, dtype=int)
  45. tm.assert_frame_equal(casted, expected)
  46. casted = DataFrame(float_frame._mgr, dtype=np.int32)
  47. expected = DataFrame(float_frame._series, dtype=np.int32)
  48. tm.assert_frame_equal(casted, expected)
  49. def test_consolidate(self, float_frame):
  50. float_frame["E"] = 7.0
  51. consolidated = float_frame._consolidate()
  52. assert len(consolidated._mgr.blocks) == 1
  53. # Ensure copy, do I want this?
  54. recons = consolidated._consolidate()
  55. assert recons is not consolidated
  56. tm.assert_frame_equal(recons, consolidated)
  57. float_frame["F"] = 8.0
  58. assert len(float_frame._mgr.blocks) == 3
  59. return_value = float_frame._consolidate_inplace()
  60. assert return_value is None
  61. assert len(float_frame._mgr.blocks) == 1
  62. def test_consolidate_inplace(self, float_frame):
  63. frame = float_frame.copy() # noqa
  64. # triggers in-place consolidation
  65. for letter in range(ord("A"), ord("Z")):
  66. float_frame[chr(letter)] = chr(letter)
  67. def test_modify_values(self, float_frame, using_copy_on_write):
  68. if using_copy_on_write:
  69. with pytest.raises(ValueError, match="read-only"):
  70. float_frame.values[5] = 5
  71. assert (float_frame.values[5] != 5).all()
  72. return
  73. float_frame.values[5] = 5
  74. assert (float_frame.values[5] == 5).all()
  75. # unconsolidated
  76. float_frame["E"] = 7.0
  77. col = float_frame["E"]
  78. float_frame.values[6] = 6
  79. # as of 2.0 .values does not consolidate, so subsequent calls to .values
  80. # does not share data
  81. assert not (float_frame.values[6] == 6).all()
  82. assert (col == 7).all()
  83. def test_boolean_set_uncons(self, float_frame):
  84. float_frame["E"] = 7.0
  85. expected = float_frame.values.copy()
  86. expected[expected > 1] = 2
  87. float_frame[float_frame > 1] = 2
  88. tm.assert_almost_equal(expected, float_frame.values)
  89. def test_constructor_with_convert(self):
  90. # this is actually mostly a test of lib.maybe_convert_objects
  91. # #2845
  92. df = DataFrame({"A": [2**63 - 1]})
  93. result = df["A"]
  94. expected = Series(np.asarray([2**63 - 1], np.int64), name="A")
  95. tm.assert_series_equal(result, expected)
  96. df = DataFrame({"A": [2**63]})
  97. result = df["A"]
  98. expected = Series(np.asarray([2**63], np.uint64), name="A")
  99. tm.assert_series_equal(result, expected)
  100. df = DataFrame({"A": [datetime(2005, 1, 1), True]})
  101. result = df["A"]
  102. expected = Series(
  103. np.asarray([datetime(2005, 1, 1), True], np.object_), name="A"
  104. )
  105. tm.assert_series_equal(result, expected)
  106. df = DataFrame({"A": [None, 1]})
  107. result = df["A"]
  108. expected = Series(np.asarray([np.nan, 1], np.float_), name="A")
  109. tm.assert_series_equal(result, expected)
  110. df = DataFrame({"A": [1.0, 2]})
  111. result = df["A"]
  112. expected = Series(np.asarray([1.0, 2], np.float_), name="A")
  113. tm.assert_series_equal(result, expected)
  114. df = DataFrame({"A": [1.0 + 2.0j, 3]})
  115. result = df["A"]
  116. expected = Series(np.asarray([1.0 + 2.0j, 3], np.complex_), name="A")
  117. tm.assert_series_equal(result, expected)
  118. df = DataFrame({"A": [1.0 + 2.0j, 3.0]})
  119. result = df["A"]
  120. expected = Series(np.asarray([1.0 + 2.0j, 3.0], np.complex_), name="A")
  121. tm.assert_series_equal(result, expected)
  122. df = DataFrame({"A": [1.0 + 2.0j, True]})
  123. result = df["A"]
  124. expected = Series(np.asarray([1.0 + 2.0j, True], np.object_), name="A")
  125. tm.assert_series_equal(result, expected)
  126. df = DataFrame({"A": [1.0, None]})
  127. result = df["A"]
  128. expected = Series(np.asarray([1.0, np.nan], np.float_), name="A")
  129. tm.assert_series_equal(result, expected)
  130. df = DataFrame({"A": [1.0 + 2.0j, None]})
  131. result = df["A"]
  132. expected = Series(np.asarray([1.0 + 2.0j, np.nan], np.complex_), name="A")
  133. tm.assert_series_equal(result, expected)
  134. df = DataFrame({"A": [2.0, 1, True, None]})
  135. result = df["A"]
  136. expected = Series(np.asarray([2.0, 1, True, None], np.object_), name="A")
  137. tm.assert_series_equal(result, expected)
  138. df = DataFrame({"A": [2.0, 1, datetime(2006, 1, 1), None]})
  139. result = df["A"]
  140. expected = Series(
  141. np.asarray([2.0, 1, datetime(2006, 1, 1), None], np.object_), name="A"
  142. )
  143. tm.assert_series_equal(result, expected)
  144. def test_construction_with_mixed(self, float_string_frame):
  145. # test construction edge cases with mixed types
  146. # f7u12, this does not work without extensive workaround
  147. data = [
  148. [datetime(2001, 1, 5), np.nan, datetime(2001, 1, 2)],
  149. [datetime(2000, 1, 2), datetime(2000, 1, 3), datetime(2000, 1, 1)],
  150. ]
  151. df = DataFrame(data)
  152. # check dtypes
  153. result = df.dtypes
  154. expected = Series({"datetime64[ns]": 3})
  155. # mixed-type frames
  156. float_string_frame["datetime"] = datetime.now()
  157. float_string_frame["timedelta"] = timedelta(days=1, seconds=1)
  158. assert float_string_frame["datetime"].dtype == "M8[ns]"
  159. assert float_string_frame["timedelta"].dtype == "m8[ns]"
  160. result = float_string_frame.dtypes
  161. expected = Series(
  162. [np.dtype("float64")] * 4
  163. + [
  164. np.dtype("object"),
  165. np.dtype("datetime64[ns]"),
  166. np.dtype("timedelta64[ns]"),
  167. ],
  168. index=list("ABCD") + ["foo", "datetime", "timedelta"],
  169. )
  170. tm.assert_series_equal(result, expected)
  171. def test_construction_with_conversions(self):
  172. # convert from a numpy array of non-ns timedelta64; as of 2.0 this does
  173. # *not* convert
  174. arr = np.array([1, 2, 3], dtype="timedelta64[s]")
  175. df = DataFrame(index=range(3))
  176. df["A"] = arr
  177. expected = DataFrame(
  178. {"A": pd.timedelta_range("00:00:01", periods=3, freq="s")}, index=range(3)
  179. )
  180. tm.assert_numpy_array_equal(df["A"].to_numpy(), arr)
  181. expected = DataFrame(
  182. {
  183. "dt1": Timestamp("20130101"),
  184. "dt2": date_range("20130101", periods=3).astype("M8[s]"),
  185. # 'dt3' : date_range('20130101 00:00:01',periods=3,freq='s'),
  186. },
  187. index=range(3),
  188. )
  189. assert expected.dtypes["dt1"] == "M8[ns]"
  190. assert expected.dtypes["dt2"] == "M8[s]"
  191. df = DataFrame(index=range(3))
  192. df["dt1"] = np.datetime64("2013-01-01")
  193. df["dt2"] = np.array(
  194. ["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]"
  195. )
  196. # df['dt3'] = np.array(['2013-01-01 00:00:01','2013-01-01
  197. # 00:00:02','2013-01-01 00:00:03'],dtype='datetime64[s]')
  198. tm.assert_frame_equal(df, expected)
  199. def test_constructor_compound_dtypes(self):
  200. # GH 5191
  201. # compound dtypes should raise not-implementederror
  202. def f(dtype):
  203. data = list(itertools.repeat((datetime(2001, 1, 1), "aa", 20), 9))
  204. return DataFrame(data=data, columns=["A", "B", "C"], dtype=dtype)
  205. msg = "compound dtypes are not implemented in the DataFrame constructor"
  206. with pytest.raises(NotImplementedError, match=msg):
  207. f([("A", "datetime64[h]"), ("B", "str"), ("C", "int32")])
  208. # pre-2.0 these used to work (though results may be unexpected)
  209. with pytest.raises(TypeError, match="argument must be"):
  210. f("int64")
  211. with pytest.raises(TypeError, match="argument must be"):
  212. f("float64")
  213. # 10822
  214. msg = "^Unknown datetime string format, unable to parse: aa, at position 0$"
  215. with pytest.raises(ValueError, match=msg):
  216. f("M8[ns]")
  217. def test_pickle(self, float_string_frame, timezone_frame):
  218. empty_frame = DataFrame()
  219. unpickled = tm.round_trip_pickle(float_string_frame)
  220. tm.assert_frame_equal(float_string_frame, unpickled)
  221. # buglet
  222. float_string_frame._mgr.ndim
  223. # empty
  224. unpickled = tm.round_trip_pickle(empty_frame)
  225. repr(unpickled)
  226. # tz frame
  227. unpickled = tm.round_trip_pickle(timezone_frame)
  228. tm.assert_frame_equal(timezone_frame, unpickled)
  229. def test_consolidate_datetime64(self):
  230. # numpy vstack bug
  231. df = DataFrame(
  232. {
  233. "starting": pd.to_datetime(
  234. [
  235. "2012-06-21 00:00",
  236. "2012-06-23 07:00",
  237. "2012-06-23 16:30",
  238. "2012-06-25 08:00",
  239. "2012-06-26 12:00",
  240. ]
  241. ),
  242. "ending": pd.to_datetime(
  243. [
  244. "2012-06-23 07:00",
  245. "2012-06-23 16:30",
  246. "2012-06-25 08:00",
  247. "2012-06-26 12:00",
  248. "2012-06-27 08:00",
  249. ]
  250. ),
  251. "measure": [77, 65, 77, 0, 77],
  252. }
  253. )
  254. ser_starting = df.starting
  255. ser_starting.index = ser_starting.values
  256. ser_starting = ser_starting.tz_localize("US/Eastern")
  257. ser_starting = ser_starting.tz_convert("UTC")
  258. ser_starting.index.name = "starting"
  259. ser_ending = df.ending
  260. ser_ending.index = ser_ending.values
  261. ser_ending = ser_ending.tz_localize("US/Eastern")
  262. ser_ending = ser_ending.tz_convert("UTC")
  263. ser_ending.index.name = "ending"
  264. df.starting = ser_starting.index
  265. df.ending = ser_ending.index
  266. tm.assert_index_equal(pd.DatetimeIndex(df.starting), ser_starting.index)
  267. tm.assert_index_equal(pd.DatetimeIndex(df.ending), ser_ending.index)
  268. def test_is_mixed_type(self, float_frame, float_string_frame):
  269. assert not float_frame._is_mixed_type
  270. assert float_string_frame._is_mixed_type
  271. def test_stale_cached_series_bug_473(self, using_copy_on_write):
  272. # this is chained, but ok
  273. with option_context("chained_assignment", None):
  274. Y = DataFrame(
  275. np.random.random((4, 4)),
  276. index=("a", "b", "c", "d"),
  277. columns=("e", "f", "g", "h"),
  278. )
  279. repr(Y)
  280. Y["e"] = Y["e"].astype("object")
  281. if using_copy_on_write:
  282. with tm.raises_chained_assignment_error():
  283. Y["g"]["c"] = np.NaN
  284. else:
  285. Y["g"]["c"] = np.NaN
  286. repr(Y)
  287. result = Y.sum() # noqa
  288. exp = Y["g"].sum() # noqa
  289. if using_copy_on_write:
  290. assert not pd.isna(Y["g"]["c"])
  291. else:
  292. assert pd.isna(Y["g"]["c"])
  293. def test_strange_column_corruption_issue(self, using_copy_on_write):
  294. # TODO(wesm): Unclear how exactly this is related to internal matters
  295. df = DataFrame(index=[0, 1])
  296. df[0] = np.nan
  297. wasCol = {}
  298. with tm.assert_produces_warning(PerformanceWarning):
  299. for i, dt in enumerate(df.index):
  300. for col in range(100, 200):
  301. if col not in wasCol:
  302. wasCol[col] = 1
  303. df[col] = np.nan
  304. if using_copy_on_write:
  305. df.loc[dt, col] = i
  306. else:
  307. df[col][dt] = i
  308. myid = 100
  309. first = len(df.loc[pd.isna(df[myid]), [myid]])
  310. second = len(df.loc[pd.isna(df[myid]), [myid]])
  311. assert first == second == 0
  312. def test_constructor_no_pandas_array(self):
  313. # Ensure that PandasArray isn't allowed inside Series
  314. # See https://github.com/pandas-dev/pandas/issues/23995 for more.
  315. arr = Series([1, 2, 3]).array
  316. result = DataFrame({"A": arr})
  317. expected = DataFrame({"A": [1, 2, 3]})
  318. tm.assert_frame_equal(result, expected)
  319. assert isinstance(result._mgr.blocks[0], NumericBlock)
  320. def test_add_column_with_pandas_array(self):
  321. # GH 26390
  322. df = DataFrame({"a": [1, 2, 3, 4], "b": ["a", "b", "c", "d"]})
  323. df["c"] = pd.arrays.PandasArray(np.array([1, 2, None, 3], dtype=object))
  324. df2 = DataFrame(
  325. {
  326. "a": [1, 2, 3, 4],
  327. "b": ["a", "b", "c", "d"],
  328. "c": pd.arrays.PandasArray(np.array([1, 2, None, 3], dtype=object)),
  329. }
  330. )
  331. assert type(df["c"]._mgr.blocks[0]) == ObjectBlock
  332. assert type(df2["c"]._mgr.blocks[0]) == ObjectBlock
  333. tm.assert_frame_equal(df, df2)
  334. def test_update_inplace_sets_valid_block_values(using_copy_on_write):
  335. # https://github.com/pandas-dev/pandas/issues/33457
  336. df = DataFrame({"a": Series([1, 2, None], dtype="category")})
  337. # inplace update of a single column
  338. df["a"].fillna(1, inplace=True)
  339. # check we haven't put a Series into any block.values
  340. assert isinstance(df._mgr.blocks[0].values, Categorical)
  341. if not using_copy_on_write:
  342. # smoketest for OP bug from GH#35731
  343. assert df.isnull().sum().sum() == 0
  344. def test_nonconsolidated_item_cache_take():
  345. # https://github.com/pandas-dev/pandas/issues/35521
  346. # create non-consolidated dataframe with object dtype columns
  347. df = DataFrame()
  348. df["col1"] = Series(["a"], dtype=object)
  349. df["col2"] = Series([0], dtype=object)
  350. # access column (item cache)
  351. df["col1"] == "A"
  352. # take operation
  353. # (regression was that this consolidated but didn't reset item cache,
  354. # resulting in an invalid cache and the .at operation not working properly)
  355. df[df["col2"] == 0]
  356. # now setting value should update actual dataframe
  357. df.at[0, "col1"] = "A"
  358. expected = DataFrame({"col1": ["A"], "col2": [0]}, dtype=object)
  359. tm.assert_frame_equal(df, expected)
  360. assert df.at[0, "col1"] == "A"