# test_datetime64.py
  1. # Arithmetic tests for DataFrame/Series/Index/Array classes that should
  2. # behave identically.
  3. # Specifically for datetime64 and datetime64tz dtypes
  4. from datetime import (
  5. datetime,
  6. time,
  7. timedelta,
  8. )
  9. from itertools import (
  10. product,
  11. starmap,
  12. )
  13. import operator
  14. import warnings
  15. import numpy as np
  16. import pytest
  17. import pytz
  18. from pandas._libs.tslibs.conversion import localize_pydatetime
  19. from pandas._libs.tslibs.offsets import shift_months
  20. from pandas.errors import PerformanceWarning
  21. import pandas as pd
  22. from pandas import (
  23. DateOffset,
  24. DatetimeIndex,
  25. NaT,
  26. Period,
  27. Series,
  28. Timedelta,
  29. TimedeltaIndex,
  30. Timestamp,
  31. date_range,
  32. )
  33. import pandas._testing as tm
  34. from pandas.core.ops import roperator
  35. from pandas.tests.arithmetic.common import (
  36. assert_cannot_add,
  37. assert_invalid_addsub_type,
  38. assert_invalid_comparison,
  39. get_upcast_box,
  40. )
  41. # ------------------------------------------------------------------
  42. # Comparisons
  43. class TestDatetime64ArrayLikeComparisons:
  44. # Comparison tests for datetime64 vectors fully parametrized over
  45. # DataFrame/Series/DatetimeIndex/DatetimeArray. Ideally all comparison
  46. # tests will eventually end up here.
  47. def test_compare_zerodim(self, tz_naive_fixture, box_with_array):
  48. # Test comparison with zero-dimensional array is unboxed
  49. tz = tz_naive_fixture
  50. box = box_with_array
  51. dti = date_range("20130101", periods=3, tz=tz)
  52. other = np.array(dti.to_numpy()[0])
  53. dtarr = tm.box_expected(dti, box)
  54. xbox = get_upcast_box(dtarr, other, True)
  55. result = dtarr <= other
  56. expected = np.array([True, False, False])
  57. expected = tm.box_expected(expected, xbox)
  58. tm.assert_equal(result, expected)
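# A minimal illustrative sketch of the zero-dimensional case above, relying on
# the module-level imports; the `_demo_*` name is arbitrary and is not
# collected by pytest.
def _demo_compare_zerodim_unboxed():
    dti = date_range("2013-01-01", periods=3)
    zerodim = np.array(dti.to_numpy()[0])  # shape (), dtype datetime64[ns]
    # the 0-dim array is unboxed, so this matches comparing to the scalar
    assert (dti <= zerodim).tolist() == [True, False, False]
    assert ((dti <= zerodim) == (dti <= dti[0])).all()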
  59. @pytest.mark.parametrize(
  60. "other",
  61. [
  62. "foo",
  63. -1,
  64. 99,
  65. 4.0,
  66. object(),
  67. timedelta(days=2),
  68. # GH#19800, GH#19301 datetime.date comparison raises to
  69. # match DatetimeIndex/Timestamp. This also matches the behavior
  70. # of stdlib datetime.datetime
  71. datetime(2001, 1, 1).date(),
  72. # GH#19301 None and NaN are *not* cast to NaT for comparisons
  73. None,
  74. np.nan,
  75. ],
  76. )
  77. def test_dt64arr_cmp_scalar_invalid(self, other, tz_naive_fixture, box_with_array):
  78. # GH#22074, GH#15966
  79. tz = tz_naive_fixture
  80. rng = date_range("1/1/2000", periods=10, tz=tz)
  81. dtarr = tm.box_expected(rng, box_with_array)
  82. assert_invalid_comparison(dtarr, other, box_with_array)
  83. @pytest.mark.parametrize(
  84. "other",
  85. [
  86. # GH#4968 invalid date/int comparisons
  87. list(range(10)),
  88. np.arange(10),
  89. np.arange(10).astype(np.float32),
  90. np.arange(10).astype(object),
  91. pd.timedelta_range("1ns", periods=10).array,
  92. np.array(pd.timedelta_range("1ns", periods=10)),
  93. list(pd.timedelta_range("1ns", periods=10)),
  94. pd.timedelta_range("1 Day", periods=10).astype(object),
  95. pd.period_range("1971-01-01", freq="D", periods=10).array,
  96. pd.period_range("1971-01-01", freq="D", periods=10).astype(object),
  97. ],
  98. )
  99. def test_dt64arr_cmp_arraylike_invalid(
  100. self, other, tz_naive_fixture, box_with_array
  101. ):
  102. tz = tz_naive_fixture
  103. dta = date_range("1970-01-01", freq="ns", periods=10, tz=tz)._data
  104. obj = tm.box_expected(dta, box_with_array)
  105. assert_invalid_comparison(obj, other, box_with_array)
  106. def test_dt64arr_cmp_mixed_invalid(self, tz_naive_fixture):
  107. tz = tz_naive_fixture
  108. dta = date_range("1970-01-01", freq="h", periods=5, tz=tz)._data
  109. other = np.array([0, 1, 2, dta[3], Timedelta(days=1)])
  110. result = dta == other
  111. expected = np.array([False, False, False, True, False])
  112. tm.assert_numpy_array_equal(result, expected)
  113. result = dta != other
  114. tm.assert_numpy_array_equal(result, ~expected)
  115. msg = "Invalid comparison between|Cannot compare type|not supported between"
  116. with pytest.raises(TypeError, match=msg):
  117. dta < other
  118. with pytest.raises(TypeError, match=msg):
  119. dta > other
  120. with pytest.raises(TypeError, match=msg):
  121. dta <= other
  122. with pytest.raises(TypeError, match=msg):
  123. dta >= other
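# A minimal illustrative sketch of the mixed-object comparison above, relying
# on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_mixed_object_comparison():
    dta = date_range("1970-01-01", freq="h", periods=5)._data
    other = np.array([0, 1, 2, dta[3], Timedelta(days=1)])
    # equality is evaluated elementwise against the object array ...
    assert (dta == other).tolist() == [False, False, False, True, False]
    # ... while ordered comparisons against mismatched types raise
    with pytest.raises(TypeError):
        dta < other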
  124. def test_dt64arr_nat_comparison(self, tz_naive_fixture, box_with_array):
  125. # GH#22242, GH#22163 DataFrame considered NaT == ts incorrectly
  126. tz = tz_naive_fixture
  127. box = box_with_array
  128. ts = Timestamp("2021-01-01", tz=tz)
  129. ser = Series([ts, NaT])
  130. obj = tm.box_expected(ser, box)
  131. xbox = get_upcast_box(obj, ts, True)
  132. expected = Series([True, False], dtype=np.bool_)
  133. expected = tm.box_expected(expected, xbox)
  134. result = obj == ts
  135. tm.assert_equal(result, expected)
  136. class TestDatetime64SeriesComparison:
  137. # TODO: moved from tests.series.test_operators; needs cleanup
  138. @pytest.mark.parametrize(
  139. "pair",
  140. [
  141. (
  142. [Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
  143. [NaT, NaT, Timestamp("2011-01-03")],
  144. ),
  145. (
  146. [Timedelta("1 days"), NaT, Timedelta("3 days")],
  147. [NaT, NaT, Timedelta("3 days")],
  148. ),
  149. (
  150. [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
  151. [NaT, NaT, Period("2011-03", freq="M")],
  152. ),
  153. ],
  154. )
  155. @pytest.mark.parametrize("reverse", [True, False])
  156. @pytest.mark.parametrize("dtype", [None, object])
  157. @pytest.mark.parametrize(
  158. "op, expected",
  159. [
  160. (operator.eq, Series([False, False, True])),
  161. (operator.ne, Series([True, True, False])),
  162. (operator.lt, Series([False, False, False])),
  163. (operator.gt, Series([False, False, False])),
  164. (operator.ge, Series([False, False, True])),
  165. (operator.le, Series([False, False, True])),
  166. ],
  167. )
  168. def test_nat_comparisons(
  169. self,
  170. dtype,
  171. index_or_series,
  172. reverse,
  173. pair,
  174. op,
  175. expected,
  176. ):
  177. box = index_or_series
  178. lhs, rhs = pair
  179. if reverse:
  180. # add lhs / rhs switched data
  181. lhs, rhs = rhs, lhs
  182. left = Series(lhs, dtype=dtype)
  183. right = box(rhs, dtype=dtype)
  184. result = op(left, right)
  185. tm.assert_series_equal(result, expected)
  186. @pytest.mark.parametrize(
  187. "data",
  188. [
  189. [Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
  190. [Timedelta("1 days"), NaT, Timedelta("3 days")],
  191. [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
  192. ],
  193. )
  194. @pytest.mark.parametrize("dtype", [None, object])
  195. def test_nat_comparisons_scalar(self, dtype, data, box_with_array):
  196. box = box_with_array
  197. left = Series(data, dtype=dtype)
  198. left = tm.box_expected(left, box)
  199. xbox = get_upcast_box(left, NaT, True)
  200. expected = [False, False, False]
  201. expected = tm.box_expected(expected, xbox)
  202. if box is pd.array and dtype is object:
  203. expected = pd.array(expected, dtype="bool")
  204. tm.assert_equal(left == NaT, expected)
  205. tm.assert_equal(NaT == left, expected)
  206. expected = [True, True, True]
  207. expected = tm.box_expected(expected, xbox)
  208. if box is pd.array and dtype is object:
  209. expected = pd.array(expected, dtype="bool")
  210. tm.assert_equal(left != NaT, expected)
  211. tm.assert_equal(NaT != left, expected)
  212. expected = [False, False, False]
  213. expected = tm.box_expected(expected, xbox)
  214. if box is pd.array and dtype is object:
  215. expected = pd.array(expected, dtype="bool")
  216. tm.assert_equal(left < NaT, expected)
  217. tm.assert_equal(NaT > left, expected)
  218. tm.assert_equal(left <= NaT, expected)
  219. tm.assert_equal(NaT >= left, expected)
  220. tm.assert_equal(left > NaT, expected)
  221. tm.assert_equal(NaT < left, expected)
  222. tm.assert_equal(left >= NaT, expected)
  223. tm.assert_equal(NaT <= left, expected)
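# A minimal illustrative sketch of NaT's scalar comparison semantics shown
# above, relying on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_nat_scalar_comparisons():
    ser = Series([Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")])
    # like NaN, NaT is unequal to everything and never orders before/after
    assert not (ser == NaT).any()
    assert (ser != NaT).all()
    assert not (ser < NaT).any()
    assert not (ser >= NaT).any()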
  224. @pytest.mark.parametrize("val", [datetime(2000, 1, 4), datetime(2000, 1, 5)])
  225. def test_series_comparison_scalars(self, val):
  226. series = Series(date_range("1/1/2000", periods=10))
  227. result = series > val
  228. expected = Series([x > val for x in series])
  229. tm.assert_series_equal(result, expected)
  230. @pytest.mark.parametrize(
  231. "left,right", [("lt", "gt"), ("le", "ge"), ("eq", "eq"), ("ne", "ne")]
  232. )
  233. def test_timestamp_compare_series(self, left, right):
  234. # see gh-4982
  235. # Make sure we can compare Timestamps on the right AND left hand side.
  236. ser = Series(date_range("20010101", periods=10), name="dates")
  237. s_nat = ser.copy(deep=True)
  238. ser[0] = Timestamp("nat")
  239. ser[3] = Timestamp("nat")
  240. left_f = getattr(operator, left)
  241. right_f = getattr(operator, right)
  242. # No NaT
  243. expected = left_f(ser, Timestamp("20010109"))
  244. result = right_f(Timestamp("20010109"), ser)
  245. tm.assert_series_equal(result, expected)
  246. # NaT
  247. expected = left_f(ser, Timestamp("nat"))
  248. result = right_f(Timestamp("nat"), ser)
  249. tm.assert_series_equal(result, expected)
  250. # Compare to Timestamp with series containing NaT
  251. expected = left_f(s_nat, Timestamp("20010109"))
  252. result = right_f(Timestamp("20010109"), s_nat)
  253. tm.assert_series_equal(result, expected)
  254. # Compare to NaT with series containing NaT
  255. expected = left_f(s_nat, NaT)
  256. result = right_f(NaT, s_nat)
  257. tm.assert_series_equal(result, expected)
  258. def test_dt64arr_timestamp_equality(self, box_with_array):
  259. # GH#11034
  260. box = box_with_array
  261. ser = Series([Timestamp("2000-01-29 01:59:00"), Timestamp("2000-01-30"), NaT])
  262. ser = tm.box_expected(ser, box)
  263. xbox = get_upcast_box(ser, ser, True)
  264. result = ser != ser
  265. expected = tm.box_expected([False, False, True], xbox)
  266. tm.assert_equal(result, expected)
  267. if box is pd.DataFrame:
  268. # alignment for frame vs series comparisons deprecated
269. # in GH#46795, enforced in 2.0
  270. with pytest.raises(ValueError, match="not aligned"):
  271. ser != ser[0]
  272. else:
  273. result = ser != ser[0]
  274. expected = tm.box_expected([False, True, True], xbox)
  275. tm.assert_equal(result, expected)
  276. if box is pd.DataFrame:
  277. # alignment for frame vs series comparisons deprecated
278. # in GH#46795, enforced in 2.0
  279. with pytest.raises(ValueError, match="not aligned"):
  280. ser != ser[2]
  281. else:
  282. result = ser != ser[2]
  283. expected = tm.box_expected([True, True, True], xbox)
  284. tm.assert_equal(result, expected)
  285. result = ser == ser
  286. expected = tm.box_expected([True, True, False], xbox)
  287. tm.assert_equal(result, expected)
  288. if box is pd.DataFrame:
  289. # alignment for frame vs series comparisons deprecated
290. # in GH#46795, enforced in 2.0
  291. with pytest.raises(ValueError, match="not aligned"):
  292. ser == ser[0]
  293. else:
  294. result = ser == ser[0]
  295. expected = tm.box_expected([True, False, False], xbox)
  296. tm.assert_equal(result, expected)
  297. if box is pd.DataFrame:
  298. # alignment for frame vs series comparisons deprecated
299. # in GH#46795, enforced in 2.0
  300. with pytest.raises(ValueError, match="not aligned"):
  301. ser == ser[2]
  302. else:
  303. result = ser == ser[2]
  304. expected = tm.box_expected([False, False, False], xbox)
  305. tm.assert_equal(result, expected)
  306. @pytest.mark.parametrize(
  307. "datetimelike",
  308. [
  309. Timestamp("20130101"),
  310. datetime(2013, 1, 1),
  311. np.datetime64("2013-01-01T00:00", "ns"),
  312. ],
  313. )
  314. @pytest.mark.parametrize(
  315. "op,expected",
  316. [
  317. (operator.lt, [True, False, False, False]),
  318. (operator.le, [True, True, False, False]),
  319. (operator.eq, [False, True, False, False]),
  320. (operator.gt, [False, False, False, True]),
  321. ],
  322. )
  323. def test_dt64_compare_datetime_scalar(self, datetimelike, op, expected):
  324. # GH#17965, test for ability to compare datetime64[ns] columns
  325. # to datetimelike
  326. ser = Series(
  327. [
  328. Timestamp("20120101"),
  329. Timestamp("20130101"),
  330. np.nan,
  331. Timestamp("20130103"),
  332. ],
  333. name="A",
  334. )
  335. result = op(ser, datetimelike)
  336. expected = Series(expected, name="A")
  337. tm.assert_series_equal(result, expected)
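# A minimal illustrative sketch of the scalar-flavor equivalence above,
# relying on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_scalar_flavors_compare_equivalently():
    ser = Series(date_range("2012-01-01", periods=3))
    expected = (ser < Timestamp("2012-01-02")).tolist()
    # Timestamp, datetime.datetime and np.datetime64 are interchangeable here
    assert (ser < datetime(2012, 1, 2)).tolist() == expected
    assert (ser < np.datetime64("2012-01-02")).tolist() == expected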
  338. class TestDatetimeIndexComparisons:
  339. # TODO: moved from tests.indexes.test_base; parametrize and de-duplicate
  340. def test_comparators(self, comparison_op):
  341. index = tm.makeDateIndex(100)
  342. element = index[len(index) // 2]
  343. element = Timestamp(element).to_datetime64()
  344. arr = np.array(index)
  345. arr_result = comparison_op(arr, element)
  346. index_result = comparison_op(index, element)
  347. assert isinstance(index_result, np.ndarray)
  348. tm.assert_numpy_array_equal(arr_result, index_result)
  349. @pytest.mark.parametrize(
  350. "other",
  351. [datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
  352. )
  353. def test_dti_cmp_datetimelike(self, other, tz_naive_fixture):
  354. tz = tz_naive_fixture
  355. dti = date_range("2016-01-01", periods=2, tz=tz)
  356. if tz is not None:
  357. if isinstance(other, np.datetime64):
  358. # no tzaware version available
  359. return
  360. other = localize_pydatetime(other, dti.tzinfo)
  361. result = dti == other
  362. expected = np.array([True, False])
  363. tm.assert_numpy_array_equal(result, expected)
  364. result = dti > other
  365. expected = np.array([False, True])
  366. tm.assert_numpy_array_equal(result, expected)
  367. result = dti >= other
  368. expected = np.array([True, True])
  369. tm.assert_numpy_array_equal(result, expected)
  370. result = dti < other
  371. expected = np.array([False, False])
  372. tm.assert_numpy_array_equal(result, expected)
  373. result = dti <= other
  374. expected = np.array([True, False])
  375. tm.assert_numpy_array_equal(result, expected)
  376. @pytest.mark.parametrize("dtype", [None, object])
  377. def test_dti_cmp_nat(self, dtype, box_with_array):
  378. left = DatetimeIndex([Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")])
  379. right = DatetimeIndex([NaT, NaT, Timestamp("2011-01-03")])
  380. left = tm.box_expected(left, box_with_array)
  381. right = tm.box_expected(right, box_with_array)
  382. xbox = get_upcast_box(left, right, True)
  383. lhs, rhs = left, right
  384. if dtype is object:
  385. lhs, rhs = left.astype(object), right.astype(object)
  386. result = rhs == lhs
  387. expected = np.array([False, False, True])
  388. expected = tm.box_expected(expected, xbox)
  389. tm.assert_equal(result, expected)
  390. result = lhs != rhs
  391. expected = np.array([True, True, False])
  392. expected = tm.box_expected(expected, xbox)
  393. tm.assert_equal(result, expected)
  394. expected = np.array([False, False, False])
  395. expected = tm.box_expected(expected, xbox)
  396. tm.assert_equal(lhs == NaT, expected)
  397. tm.assert_equal(NaT == rhs, expected)
  398. expected = np.array([True, True, True])
  399. expected = tm.box_expected(expected, xbox)
  400. tm.assert_equal(lhs != NaT, expected)
  401. tm.assert_equal(NaT != lhs, expected)
  402. expected = np.array([False, False, False])
  403. expected = tm.box_expected(expected, xbox)
  404. tm.assert_equal(lhs < NaT, expected)
  405. tm.assert_equal(NaT > lhs, expected)
  406. def test_dti_cmp_nat_behaves_like_float_cmp_nan(self):
  407. fidx1 = pd.Index([1.0, np.nan, 3.0, np.nan, 5.0, 7.0])
  408. fidx2 = pd.Index([2.0, 3.0, np.nan, np.nan, 6.0, 7.0])
  409. didx1 = DatetimeIndex(
  410. ["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
  411. )
  412. didx2 = DatetimeIndex(
  413. ["2014-02-01", "2014-03-01", NaT, NaT, "2014-06-01", "2014-07-01"]
  414. )
  415. darr = np.array(
  416. [
  417. np.datetime64("2014-02-01 00:00"),
  418. np.datetime64("2014-03-01 00:00"),
  419. np.datetime64("nat"),
  420. np.datetime64("nat"),
  421. np.datetime64("2014-06-01 00:00"),
  422. np.datetime64("2014-07-01 00:00"),
  423. ]
  424. )
  425. cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]
426. # Check pd.NaT is handled the same as np.nan
  427. with tm.assert_produces_warning(None):
  428. for idx1, idx2 in cases:
  429. result = idx1 < idx2
  430. expected = np.array([True, False, False, False, True, False])
  431. tm.assert_numpy_array_equal(result, expected)
  432. result = idx2 > idx1
  433. expected = np.array([True, False, False, False, True, False])
  434. tm.assert_numpy_array_equal(result, expected)
  435. result = idx1 <= idx2
  436. expected = np.array([True, False, False, False, True, True])
  437. tm.assert_numpy_array_equal(result, expected)
  438. result = idx2 >= idx1
  439. expected = np.array([True, False, False, False, True, True])
  440. tm.assert_numpy_array_equal(result, expected)
  441. result = idx1 == idx2
  442. expected = np.array([False, False, False, False, False, True])
  443. tm.assert_numpy_array_equal(result, expected)
  444. result = idx1 != idx2
  445. expected = np.array([True, True, True, True, True, False])
  446. tm.assert_numpy_array_equal(result, expected)
  447. with tm.assert_produces_warning(None):
  448. for idx1, val in [(fidx1, np.nan), (didx1, NaT)]:
  449. result = idx1 < val
  450. expected = np.array([False, False, False, False, False, False])
  451. tm.assert_numpy_array_equal(result, expected)
  452. result = idx1 > val
  453. tm.assert_numpy_array_equal(result, expected)
  454. result = idx1 <= val
  455. tm.assert_numpy_array_equal(result, expected)
  456. result = idx1 >= val
  457. tm.assert_numpy_array_equal(result, expected)
  458. result = idx1 == val
  459. tm.assert_numpy_array_equal(result, expected)
  460. result = idx1 != val
  461. expected = np.array([True, True, True, True, True, True])
  462. tm.assert_numpy_array_equal(result, expected)
463. # Check pd.NaT is handled the same as np.nan
  464. with tm.assert_produces_warning(None):
  465. for idx1, val in [(fidx1, 3), (didx1, datetime(2014, 3, 1))]:
  466. result = idx1 < val
  467. expected = np.array([True, False, False, False, False, False])
  468. tm.assert_numpy_array_equal(result, expected)
  469. result = idx1 > val
  470. expected = np.array([False, False, False, False, True, True])
  471. tm.assert_numpy_array_equal(result, expected)
  472. result = idx1 <= val
  473. expected = np.array([True, False, True, False, False, False])
  474. tm.assert_numpy_array_equal(result, expected)
  475. result = idx1 >= val
  476. expected = np.array([False, False, True, False, True, True])
  477. tm.assert_numpy_array_equal(result, expected)
  478. result = idx1 == val
  479. expected = np.array([False, False, True, False, False, False])
  480. tm.assert_numpy_array_equal(result, expected)
  481. result = idx1 != val
  482. expected = np.array([True, True, False, True, True, True])
  483. tm.assert_numpy_array_equal(result, expected)
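# A minimal illustrative sketch of the NaT-behaves-like-NaN rule above,
# relying on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_nat_like_nan_in_comparisons():
    fidx = pd.Index([1.0, np.nan, 3.0])
    didx = DatetimeIndex(["2014-01-01", NaT, "2014-03-01"])
    # the NaN/NaT slot is False under every ordered comparison
    assert (fidx < 2.0).tolist() == [True, False, False]
    assert (didx < Timestamp("2014-02-01")).tolist() == [True, False, False]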
  484. def test_comparison_tzawareness_compat(self, comparison_op, box_with_array):
  485. # GH#18162
  486. op = comparison_op
  487. box = box_with_array
  488. dr = date_range("2016-01-01", periods=6)
  489. dz = dr.tz_localize("US/Pacific")
  490. dr = tm.box_expected(dr, box)
  491. dz = tm.box_expected(dz, box)
  492. if box is pd.DataFrame:
  493. tolist = lambda x: x.astype(object).values.tolist()[0]
  494. else:
  495. tolist = list
  496. if op not in [operator.eq, operator.ne]:
  497. msg = (
  498. r"Invalid comparison between dtype=datetime64\[ns.*\] "
  499. "and (Timestamp|DatetimeArray|list|ndarray)"
  500. )
  501. with pytest.raises(TypeError, match=msg):
  502. op(dr, dz)
  503. with pytest.raises(TypeError, match=msg):
  504. op(dr, tolist(dz))
  505. with pytest.raises(TypeError, match=msg):
  506. op(dr, np.array(tolist(dz), dtype=object))
  507. with pytest.raises(TypeError, match=msg):
  508. op(dz, dr)
  509. with pytest.raises(TypeError, match=msg):
  510. op(dz, tolist(dr))
  511. with pytest.raises(TypeError, match=msg):
  512. op(dz, np.array(tolist(dr), dtype=object))
  513. # The aware==aware and naive==naive comparisons should *not* raise
  514. assert np.all(dr == dr)
  515. assert np.all(dr == tolist(dr))
  516. assert np.all(tolist(dr) == dr)
  517. assert np.all(np.array(tolist(dr), dtype=object) == dr)
  518. assert np.all(dr == np.array(tolist(dr), dtype=object))
  519. assert np.all(dz == dz)
  520. assert np.all(dz == tolist(dz))
  521. assert np.all(tolist(dz) == dz)
  522. assert np.all(np.array(tolist(dz), dtype=object) == dz)
  523. assert np.all(dz == np.array(tolist(dz), dtype=object))
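# A minimal illustrative sketch of the tz-awareness comparison rules above,
# relying on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_tzawareness_comparison_rules():
    naive = date_range("2016-01-01", periods=3)
    aware = naive.tz_localize("US/Pacific")
    # mixing naive and aware is allowed for ==/!= (never equal) ...
    assert not (naive == aware).any()
    assert (naive != aware).all()
    # ... but ordered comparisons raise
    with pytest.raises(TypeError, match="Invalid comparison"):
        naive < aware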
  524. def test_comparison_tzawareness_compat_scalars(self, comparison_op, box_with_array):
  525. # GH#18162
  526. op = comparison_op
  527. dr = date_range("2016-01-01", periods=6)
  528. dz = dr.tz_localize("US/Pacific")
  529. dr = tm.box_expected(dr, box_with_array)
  530. dz = tm.box_expected(dz, box_with_array)
  531. # Check comparisons against scalar Timestamps
  532. ts = Timestamp("2000-03-14 01:59")
  533. ts_tz = Timestamp("2000-03-14 01:59", tz="Europe/Amsterdam")
  534. assert np.all(dr > ts)
  535. msg = r"Invalid comparison between dtype=datetime64\[ns.*\] and Timestamp"
  536. if op not in [operator.eq, operator.ne]:
  537. with pytest.raises(TypeError, match=msg):
  538. op(dr, ts_tz)
  539. assert np.all(dz > ts_tz)
  540. if op not in [operator.eq, operator.ne]:
  541. with pytest.raises(TypeError, match=msg):
  542. op(dz, ts)
  543. if op not in [operator.eq, operator.ne]:
  544. # GH#12601: Check comparison against Timestamps and DatetimeIndex
  545. with pytest.raises(TypeError, match=msg):
  546. op(ts, dz)
  547. @pytest.mark.parametrize(
  548. "other",
  549. [datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
  550. )
  551. # Bug in NumPy? https://github.com/numpy/numpy/issues/13841
552. # Raising in __eq__ will fall back to NumPy, which warns, fails,
  553. # then re-raises the original exception. So we just need to ignore.
  554. @pytest.mark.filterwarnings("ignore:elementwise comp:DeprecationWarning")
  555. def test_scalar_comparison_tzawareness(
  556. self, comparison_op, other, tz_aware_fixture, box_with_array
  557. ):
  558. op = comparison_op
  559. tz = tz_aware_fixture
  560. dti = date_range("2016-01-01", periods=2, tz=tz)
  561. dtarr = tm.box_expected(dti, box_with_array)
  562. xbox = get_upcast_box(dtarr, other, True)
  563. if op in [operator.eq, operator.ne]:
  564. exbool = op is operator.ne
  565. expected = np.array([exbool, exbool], dtype=bool)
  566. expected = tm.box_expected(expected, xbox)
  567. result = op(dtarr, other)
  568. tm.assert_equal(result, expected)
  569. result = op(other, dtarr)
  570. tm.assert_equal(result, expected)
  571. else:
  572. msg = (
  573. r"Invalid comparison between dtype=datetime64\[ns, .*\] "
  574. f"and {type(other).__name__}"
  575. )
  576. with pytest.raises(TypeError, match=msg):
  577. op(dtarr, other)
  578. with pytest.raises(TypeError, match=msg):
  579. op(other, dtarr)
  580. def test_nat_comparison_tzawareness(self, comparison_op):
  581. # GH#19276
  582. # tzaware DatetimeIndex should not raise when compared to NaT
  583. op = comparison_op
  584. dti = DatetimeIndex(
  585. ["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
  586. )
  587. expected = np.array([op == operator.ne] * len(dti))
  588. result = op(dti, NaT)
  589. tm.assert_numpy_array_equal(result, expected)
  590. result = op(dti.tz_localize("US/Pacific"), NaT)
  591. tm.assert_numpy_array_equal(result, expected)
  592. def test_dti_cmp_str(self, tz_naive_fixture):
  593. # GH#22074
  594. # regardless of tz, we expect these comparisons are valid
  595. tz = tz_naive_fixture
  596. rng = date_range("1/1/2000", periods=10, tz=tz)
  597. other = "1/1/2000"
  598. result = rng == other
  599. expected = np.array([True] + [False] * 9)
  600. tm.assert_numpy_array_equal(result, expected)
  601. result = rng != other
  602. expected = np.array([False] + [True] * 9)
  603. tm.assert_numpy_array_equal(result, expected)
  604. result = rng < other
  605. expected = np.array([False] * 10)
  606. tm.assert_numpy_array_equal(result, expected)
  607. result = rng <= other
  608. expected = np.array([True] + [False] * 9)
  609. tm.assert_numpy_array_equal(result, expected)
  610. result = rng > other
  611. expected = np.array([False] + [True] * 9)
  612. tm.assert_numpy_array_equal(result, expected)
  613. result = rng >= other
  614. expected = np.array([True] * 10)
  615. tm.assert_numpy_array_equal(result, expected)
  616. def test_dti_cmp_list(self):
  617. rng = date_range("1/1/2000", periods=10)
  618. result = rng == list(rng)
  619. expected = rng == rng
  620. tm.assert_numpy_array_equal(result, expected)
  621. @pytest.mark.parametrize(
  622. "other",
  623. [
  624. pd.timedelta_range("1D", periods=10),
  625. pd.timedelta_range("1D", periods=10).to_series(),
  626. pd.timedelta_range("1D", periods=10).asi8.view("m8[ns]"),
  627. ],
  628. ids=lambda x: type(x).__name__,
  629. )
  630. def test_dti_cmp_tdi_tzawareness(self, other):
  631. # GH#22074
632. # regression test that we _don't_ call _assert_tzawareness_compat
  633. # when comparing against TimedeltaIndex
  634. dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
  635. result = dti == other
  636. expected = np.array([False] * 10)
  637. tm.assert_numpy_array_equal(result, expected)
  638. result = dti != other
  639. expected = np.array([True] * 10)
  640. tm.assert_numpy_array_equal(result, expected)
  641. msg = "Invalid comparison between"
  642. with pytest.raises(TypeError, match=msg):
  643. dti < other
  644. with pytest.raises(TypeError, match=msg):
  645. dti <= other
  646. with pytest.raises(TypeError, match=msg):
  647. dti > other
  648. with pytest.raises(TypeError, match=msg):
  649. dti >= other
  650. def test_dti_cmp_object_dtype(self):
  651. # GH#22074
  652. dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
  653. other = dti.astype("O")
  654. result = dti == other
  655. expected = np.array([True] * 10)
  656. tm.assert_numpy_array_equal(result, expected)
  657. other = dti.tz_localize(None)
  658. result = dti != other
  659. tm.assert_numpy_array_equal(result, expected)
  660. other = np.array(list(dti[:5]) + [Timedelta(days=1)] * 5)
  661. result = dti == other
  662. expected = np.array([True] * 5 + [False] * 5)
  663. tm.assert_numpy_array_equal(result, expected)
  664. msg = ">=' not supported between instances of 'Timestamp' and 'Timedelta'"
  665. with pytest.raises(TypeError, match=msg):
  666. dti >= other
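# A minimal illustrative sketch of the object-dtype comparison above, relying
# on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_object_dtype_comparison():
    dti = date_range("2000-01-01", periods=3, tz="Asia/Tokyo")
    # casting to object dtype does not change elementwise equality
    assert (dti == dti.astype(object)).all()
    # a Timedelta is simply unequal under ==
    assert not (dti == Timedelta(days=1)).any()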
  667. # ------------------------------------------------------------------
  668. # Arithmetic
  669. class TestDatetime64Arithmetic:
  670. # This class is intended for "finished" tests that are fully parametrized
  671. # over DataFrame/Series/Index/DatetimeArray
  672. # -------------------------------------------------------------
  673. # Addition/Subtraction of timedelta-like
  674. @pytest.mark.arm_slow
  675. def test_dt64arr_add_timedeltalike_scalar(
  676. self, tz_naive_fixture, two_hours, box_with_array
  677. ):
  678. # GH#22005, GH#22163 check DataFrame doesn't raise TypeError
  679. tz = tz_naive_fixture
  680. rng = date_range("2000-01-01", "2000-02-01", tz=tz)
  681. expected = date_range("2000-01-01 02:00", "2000-02-01 02:00", tz=tz)
  682. rng = tm.box_expected(rng, box_with_array)
  683. expected = tm.box_expected(expected, box_with_array)
  684. result = rng + two_hours
  685. tm.assert_equal(result, expected)
  686. result = two_hours + rng
  687. tm.assert_equal(result, expected)
  688. rng += two_hours
  689. tm.assert_equal(rng, expected)
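# A minimal illustrative sketch of timedelta-scalar addition above, relying on
# the module-level imports; the `_demo_*` name is arbitrary.
def _demo_add_timedelta_scalar():
    rng = date_range("2000-01-01", periods=3)
    shifted = rng + Timedelta(hours=2)
    # addition is elementwise and commutative
    assert shifted[0] == Timestamp("2000-01-01 02:00")
    assert (shifted == Timedelta(hours=2) + rng).all()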
  690. def test_dt64arr_sub_timedeltalike_scalar(
  691. self, tz_naive_fixture, two_hours, box_with_array
  692. ):
  693. tz = tz_naive_fixture
  694. rng = date_range("2000-01-01", "2000-02-01", tz=tz)
  695. expected = date_range("1999-12-31 22:00", "2000-01-31 22:00", tz=tz)
  696. rng = tm.box_expected(rng, box_with_array)
  697. expected = tm.box_expected(expected, box_with_array)
  698. result = rng - two_hours
  699. tm.assert_equal(result, expected)
  700. rng -= two_hours
  701. tm.assert_equal(rng, expected)
  702. def test_dt64_array_sub_dt_with_different_timezone(self, box_with_array):
  703. t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
  704. t1 = tm.box_expected(t1, box_with_array)
  705. t2 = Timestamp("20130101").tz_localize("CET")
  706. tnaive = Timestamp(20130101)
  707. result = t1 - t2
  708. expected = TimedeltaIndex(
  709. ["0 days 06:00:00", "1 days 06:00:00", "2 days 06:00:00"]
  710. )
  711. expected = tm.box_expected(expected, box_with_array)
  712. tm.assert_equal(result, expected)
  713. result = t2 - t1
  714. expected = TimedeltaIndex(
  715. ["-1 days +18:00:00", "-2 days +18:00:00", "-3 days +18:00:00"]
  716. )
  717. expected = tm.box_expected(expected, box_with_array)
  718. tm.assert_equal(result, expected)
  719. msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
  720. with pytest.raises(TypeError, match=msg):
  721. t1 - tnaive
  722. with pytest.raises(TypeError, match=msg):
  723. tnaive - t1
  724. def test_dt64_array_sub_dt64_array_with_different_timezone(self, box_with_array):
  725. t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
  726. t1 = tm.box_expected(t1, box_with_array)
  727. t2 = date_range("20130101", periods=3).tz_localize("CET")
  728. t2 = tm.box_expected(t2, box_with_array)
  729. tnaive = date_range("20130101", periods=3)
  730. result = t1 - t2
  731. expected = TimedeltaIndex(
  732. ["0 days 06:00:00", "0 days 06:00:00", "0 days 06:00:00"]
  733. )
  734. expected = tm.box_expected(expected, box_with_array)
  735. tm.assert_equal(result, expected)
  736. result = t2 - t1
  737. expected = TimedeltaIndex(
  738. ["-1 days +18:00:00", "-1 days +18:00:00", "-1 days +18:00:00"]
  739. )
  740. expected = tm.box_expected(expected, box_with_array)
  741. tm.assert_equal(result, expected)
  742. msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
  743. with pytest.raises(TypeError, match=msg):
  744. t1 - tnaive
  745. with pytest.raises(TypeError, match=msg):
  746. tnaive - t1
  747. def test_dt64arr_add_sub_td64_nat(self, box_with_array, tz_naive_fixture):
  748. # GH#23320 special handling for timedelta64("NaT")
  749. tz = tz_naive_fixture
  750. dti = date_range("1994-04-01", periods=9, tz=tz, freq="QS")
  751. other = np.timedelta64("NaT")
  752. expected = DatetimeIndex(["NaT"] * 9, tz=tz)
  753. obj = tm.box_expected(dti, box_with_array)
  754. expected = tm.box_expected(expected, box_with_array)
  755. result = obj + other
  756. tm.assert_equal(result, expected)
  757. result = other + obj
  758. tm.assert_equal(result, expected)
  759. result = obj - other
  760. tm.assert_equal(result, expected)
  761. msg = "cannot subtract"
  762. with pytest.raises(TypeError, match=msg):
  763. other - obj
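# A minimal illustrative sketch of the timedelta64("NaT") handling above,
# relying on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_td64_nat_propagates():
    dti = date_range("1994-04-01", periods=3)
    # NaT propagates: every element becomes NaT on + or -
    assert (dti + np.timedelta64("NaT")).isna().all()
    assert (dti - np.timedelta64("NaT")).isna().all()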
  764. def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array):
  765. tz = tz_naive_fixture
  766. dti = date_range("2016-01-01", periods=3, tz=tz)
  767. tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"])
  768. tdarr = tdi.values
  769. expected = date_range("2015-12-31", "2016-01-02", periods=3, tz=tz)
  770. dtarr = tm.box_expected(dti, box_with_array)
  771. expected = tm.box_expected(expected, box_with_array)
  772. result = dtarr + tdarr
  773. tm.assert_equal(result, expected)
  774. result = tdarr + dtarr
  775. tm.assert_equal(result, expected)
  776. expected = date_range("2016-01-02", "2016-01-04", periods=3, tz=tz)
  777. expected = tm.box_expected(expected, box_with_array)
  778. result = dtarr - tdarr
  779. tm.assert_equal(result, expected)
  780. msg = "cannot subtract|(bad|unsupported) operand type for unary"
  781. with pytest.raises(TypeError, match=msg):
  782. tdarr - dtarr
  783. # -----------------------------------------------------------------
  784. # Subtraction of datetime-like scalars
  785. @pytest.mark.parametrize(
  786. "ts",
  787. [
  788. Timestamp("2013-01-01"),
  789. Timestamp("2013-01-01").to_pydatetime(),
  790. Timestamp("2013-01-01").to_datetime64(),
  791. # GH#7996, GH#22163 ensure non-nano datetime64 is converted to nano
  792. # for DataFrame operation
  793. np.datetime64("2013-01-01", "D"),
  794. ],
  795. )
  796. def test_dt64arr_sub_dtscalar(self, box_with_array, ts):
  797. # GH#8554, GH#22163 DataFrame op should _not_ return dt64 dtype
  798. idx = date_range("2013-01-01", periods=3)._with_freq(None)
  799. idx = tm.box_expected(idx, box_with_array)
  800. expected = TimedeltaIndex(["0 Days", "1 Day", "2 Days"])
  801. expected = tm.box_expected(expected, box_with_array)
  802. result = idx - ts
  803. tm.assert_equal(result, expected)
  804. result = ts - idx
  805. tm.assert_equal(result, -expected)
  807. def test_dt64arr_sub_timestamp_tzaware(self, box_with_array):
  808. ser = date_range("2014-03-17", periods=2, freq="D", tz="US/Eastern")
  809. ser = ser._with_freq(None)
  810. ts = ser[0]
  811. ser = tm.box_expected(ser, box_with_array)
  812. delta_series = Series([np.timedelta64(0, "D"), np.timedelta64(1, "D")])
  813. expected = tm.box_expected(delta_series, box_with_array)
  814. tm.assert_equal(ser - ts, expected)
  815. tm.assert_equal(ts - ser, -expected)
  816. def test_dt64arr_sub_NaT(self, box_with_array):
  817. # GH#18808
  818. dti = DatetimeIndex([NaT, Timestamp("19900315")])
  819. ser = tm.box_expected(dti, box_with_array)
  820. result = ser - NaT
  821. expected = Series([NaT, NaT], dtype="timedelta64[ns]")
  822. expected = tm.box_expected(expected, box_with_array)
  823. tm.assert_equal(result, expected)
  824. dti_tz = dti.tz_localize("Asia/Tokyo")
  825. ser_tz = tm.box_expected(dti_tz, box_with_array)
  826. result = ser_tz - NaT
  827. expected = Series([NaT, NaT], dtype="timedelta64[ns]")
  828. expected = tm.box_expected(expected, box_with_array)
  829. tm.assert_equal(result, expected)
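# A minimal illustrative sketch of NaT subtraction above, relying on the
# module-level imports; the `_demo_*` name is arbitrary.
def _demo_sub_nat():
    ser = Series([Timestamp("1990-03-15"), Timestamp("1990-03-16")])
    result = ser - NaT
    # the result is timedelta-typed and entirely NaT
    assert result.dtype == "timedelta64[ns]"
    assert result.isna().all()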
  830. # -------------------------------------------------------------
  831. # Subtraction of datetime-like array-like
  832. def test_dt64arr_sub_dt64object_array(self, box_with_array, tz_naive_fixture):
  833. dti = date_range("2016-01-01", periods=3, tz=tz_naive_fixture)
  834. expected = dti - dti
  835. obj = tm.box_expected(dti, box_with_array)
  836. expected = tm.box_expected(expected, box_with_array).astype(object)
  837. with tm.assert_produces_warning(PerformanceWarning):
  838. result = obj - obj.astype(object)
  839. tm.assert_equal(result, expected)
  840. def test_dt64arr_naive_sub_dt64ndarray(self, box_with_array):
  841. dti = date_range("2016-01-01", periods=3, tz=None)
  842. dt64vals = dti.values
  843. dtarr = tm.box_expected(dti, box_with_array)
  844. expected = dtarr - dtarr
  845. result = dtarr - dt64vals
  846. tm.assert_equal(result, expected)
  847. result = dt64vals - dtarr
  848. tm.assert_equal(result, expected)
  849. def test_dt64arr_aware_sub_dt64ndarray_raises(
  850. self, tz_aware_fixture, box_with_array
  851. ):
  852. tz = tz_aware_fixture
  853. dti = date_range("2016-01-01", periods=3, tz=tz)
  854. dt64vals = dti.values
  855. dtarr = tm.box_expected(dti, box_with_array)
  856. msg = "Cannot subtract tz-naive and tz-aware datetime"
  857. with pytest.raises(TypeError, match=msg):
  858. dtarr - dt64vals
  859. with pytest.raises(TypeError, match=msg):
  860. dt64vals - dtarr
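# A minimal illustrative sketch of the mixed tz-naive/tz-aware subtraction
# rule above, relying on the module-level imports; the `_demo_*` name is
# arbitrary.
def _demo_mixed_tz_subtraction_raises():
    aware = date_range("2016-01-01", periods=3, tz="US/Eastern")
    naive_vals = aware.tz_localize(None).values
    with pytest.raises(TypeError, match="tz-naive and tz-aware"):
        aware - naive_vals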
  861. # -------------------------------------------------------------
  862. # Addition of datetime-like others (invalid)
  863. def test_dt64arr_add_dtlike_raises(self, tz_naive_fixture, box_with_array):
  864. # GH#22163 ensure DataFrame doesn't cast Timestamp to i8
  865. # GH#9631
  866. tz = tz_naive_fixture
  867. dti = date_range("2016-01-01", periods=3, tz=tz)
  868. if tz is None:
  869. dti2 = dti.tz_localize("US/Eastern")
  870. else:
  871. dti2 = dti.tz_localize(None)
  872. dtarr = tm.box_expected(dti, box_with_array)
  873. assert_cannot_add(dtarr, dti.values)
  874. assert_cannot_add(dtarr, dti)
  875. assert_cannot_add(dtarr, dtarr)
  876. assert_cannot_add(dtarr, dti[0])
  877. assert_cannot_add(dtarr, dti[0].to_pydatetime())
  878. assert_cannot_add(dtarr, dti[0].to_datetime64())
  879. assert_cannot_add(dtarr, dti2[0])
  880. assert_cannot_add(dtarr, dti2[0].to_pydatetime())
  881. assert_cannot_add(dtarr, np.datetime64("2011-01-01", "D"))
  882. # -------------------------------------------------------------
  883. # Other Invalid Addition/Subtraction
  884. # Note: freq here includes both Tick and non-Tick offsets; this is
  885. # relevant because historically integer-addition was allowed if we had
  886. # a freq.
  887. @pytest.mark.parametrize("freq", ["H", "D", "W", "M", "MS", "Q", "B", None])
  888. @pytest.mark.parametrize("dtype", [None, "uint8"])
  889. def test_dt64arr_addsub_intlike(
  890. self, dtype, box_with_array, freq, tz_naive_fixture
  891. ):
  892. # GH#19959, GH#19123, GH#19012
  893. tz = tz_naive_fixture
  894. if box_with_array is pd.DataFrame:
  895. # alignment headaches
  896. return
  897. if freq is None:
  898. dti = DatetimeIndex(["NaT", "2017-04-05 06:07:08"], tz=tz)
  899. else:
  900. dti = date_range("2016-01-01", periods=2, freq=freq, tz=tz)
  901. obj = box_with_array(dti)
  902. other = np.array([4, -1])
  903. if dtype is not None:
  904. other = other.astype(dtype)
  905. msg = "|".join(
  906. [
  907. "Addition/subtraction of integers",
  908. "cannot subtract DatetimeArray from",
  909. # IntegerArray
  910. "can only perform ops with numeric values",
  911. "unsupported operand type.*Categorical",
  912. r"unsupported operand type\(s\) for -: 'int' and 'Timestamp'",
  913. ]
  914. )
  915. assert_invalid_addsub_type(obj, 1, msg)
  916. assert_invalid_addsub_type(obj, np.int64(2), msg)
  917. assert_invalid_addsub_type(obj, np.array(3, dtype=np.int64), msg)
  918. assert_invalid_addsub_type(obj, other, msg)
  919. assert_invalid_addsub_type(obj, np.array(other), msg)
  920. assert_invalid_addsub_type(obj, pd.array(other), msg)
  921. assert_invalid_addsub_type(obj, pd.Categorical(other), msg)
  922. assert_invalid_addsub_type(obj, pd.Index(other), msg)
  923. assert_invalid_addsub_type(obj, Series(other), msg)
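# A minimal illustrative sketch of the integer add/sub rule above, relying on
# the module-level imports; the `_demo_*` name is arbitrary.
def _demo_integer_addition_invalid():
    dti = date_range("2016-01-01", periods=2)
    # plain integers are rejected ...
    with pytest.raises(TypeError):
        dti + 1
    # ... a Timedelta (or DateOffset) is the supported spelling
    assert (dti + Timedelta(days=1))[0] == Timestamp("2016-01-02")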
  924. @pytest.mark.parametrize(
  925. "other",
  926. [
  927. 3.14,
  928. np.array([2.0, 3.0]),
  929. # GH#13078 datetime +/- Period is invalid
  930. Period("2011-01-01", freq="D"),
  931. # https://github.com/pandas-dev/pandas/issues/10329
  932. time(1, 2, 3),
  933. ],
  934. )
  935. @pytest.mark.parametrize("dti_freq", [None, "D"])
  936. def test_dt64arr_add_sub_invalid(self, dti_freq, other, box_with_array):
  937. dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
  938. dtarr = tm.box_expected(dti, box_with_array)
  939. msg = "|".join(
  940. [
  941. "unsupported operand type",
  942. "cannot (add|subtract)",
  943. "cannot use operands with types",
  944. "ufunc '?(add|subtract)'? cannot use operands with types",
  945. "Concatenation operation is not implemented for NumPy arrays",
  946. ]
  947. )
  948. assert_invalid_addsub_type(dtarr, other, msg)
  949. @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "H"])
  950. @pytest.mark.parametrize("dti_freq", [None, "D"])
  951. def test_dt64arr_add_sub_parr(
  952. self, dti_freq, pi_freq, box_with_array, box_with_array2
  953. ):
  954. # GH#20049 subtracting PeriodIndex should raise TypeError
  955. dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
  956. pi = dti.to_period(pi_freq)
  957. dtarr = tm.box_expected(dti, box_with_array)
  958. parr = tm.box_expected(pi, box_with_array2)
  959. msg = "|".join(
  960. [
  961. "cannot (add|subtract)",
  962. "unsupported operand",
  963. "descriptor.*requires",
  964. "ufunc.*cannot use operands",
  965. ]
  966. )
  967. assert_invalid_addsub_type(dtarr, parr, msg)
  968. def test_dt64arr_addsub_time_objects_raises(self, box_with_array, tz_naive_fixture):
  969. # https://github.com/pandas-dev/pandas/issues/10329
  970. tz = tz_naive_fixture
  971. obj1 = date_range("2012-01-01", periods=3, tz=tz)
  972. obj2 = [time(i, i, i) for i in range(3)]
  973. obj1 = tm.box_expected(obj1, box_with_array)
  974. obj2 = tm.box_expected(obj2, box_with_array)
  975. msg = "|".join(
  976. [
  977. "unsupported operand",
  978. "cannot subtract DatetimeArray from ndarray",
  979. ]
  980. )
  981. with warnings.catch_warnings(record=True):
  982. # pandas.errors.PerformanceWarning: Non-vectorized DateOffset being
  983. # applied to Series or DatetimeIndex
  984. # we aren't testing that here, so ignore.
  985. warnings.simplefilter("ignore", PerformanceWarning)
  986. assert_invalid_addsub_type(obj1, obj2, msg=msg)
  987. # -------------------------------------------------------------
  988. # Other invalid operations
  989. @pytest.mark.parametrize(
  990. "dt64_series",
  991. [
  992. Series([Timestamp("19900315"), Timestamp("19900315")]),
  993. Series([NaT, Timestamp("19900315")]),
  994. Series([NaT, NaT], dtype="datetime64[ns]"),
  995. ],
  996. )
  997. @pytest.mark.parametrize("one", [1, 1.0, np.array(1)])
  998. def test_dt64_mul_div_numeric_invalid(self, one, dt64_series, box_with_array):
  999. obj = tm.box_expected(dt64_series, box_with_array)
  1000. msg = "cannot perform .* with this index type"
  1001. # multiplication
  1002. with pytest.raises(TypeError, match=msg):
  1003. obj * one
  1004. with pytest.raises(TypeError, match=msg):
  1005. one * obj
  1006. # division
  1007. with pytest.raises(TypeError, match=msg):
  1008. obj / one
  1009. with pytest.raises(TypeError, match=msg):
  1010. one / obj
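# A minimal illustrative sketch of the invalid mul/div operations above,
# relying on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_mul_div_invalid():
    ser = Series([Timestamp("1990-03-15"), Timestamp("1990-03-16")])
    with pytest.raises(TypeError):
        ser * 1
    with pytest.raises(TypeError):
        ser / 1.0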
  1011. class TestDatetime64DateOffsetArithmetic:
  1012. # -------------------------------------------------------------
  1013. # Tick DateOffsets
  1014. # TODO: parametrize over timezone?
  1015. def test_dt64arr_series_add_tick_DateOffset(self, box_with_array):
  1016. # GH#4532
  1017. # operate with pd.offsets
  1018. ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
  1019. expected = Series(
  1020. [Timestamp("20130101 9:01:05"), Timestamp("20130101 9:02:05")]
  1021. )
  1022. ser = tm.box_expected(ser, box_with_array)
  1023. expected = tm.box_expected(expected, box_with_array)
  1024. result = ser + pd.offsets.Second(5)
  1025. tm.assert_equal(result, expected)
  1026. result2 = pd.offsets.Second(5) + ser
  1027. tm.assert_equal(result2, expected)
  1028. def test_dt64arr_series_sub_tick_DateOffset(self, box_with_array):
  1029. # GH#4532
  1030. # operate with pd.offsets
  1031. ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
  1032. expected = Series(
  1033. [Timestamp("20130101 9:00:55"), Timestamp("20130101 9:01:55")]
  1034. )
  1035. ser = tm.box_expected(ser, box_with_array)
  1036. expected = tm.box_expected(expected, box_with_array)
  1037. result = ser - pd.offsets.Second(5)
  1038. tm.assert_equal(result, expected)
  1039. result2 = -pd.offsets.Second(5) + ser
  1040. tm.assert_equal(result2, expected)
  1041. msg = "(bad|unsupported) operand type for unary"
  1042. with pytest.raises(TypeError, match=msg):
  1043. pd.offsets.Second(5) - ser
  1044. @pytest.mark.parametrize(
  1045. "cls_name", ["Day", "Hour", "Minute", "Second", "Milli", "Micro", "Nano"]
  1046. )
  1047. def test_dt64arr_add_sub_tick_DateOffset_smoke(self, cls_name, box_with_array):
  1048. # GH#4532
  1049. # smoke tests for valid DateOffsets
  1050. ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
  1051. ser = tm.box_expected(ser, box_with_array)
  1052. offset_cls = getattr(pd.offsets, cls_name)
  1053. ser + offset_cls(5)
  1054. offset_cls(5) + ser
  1055. ser - offset_cls(5)
  1056. def test_dti_add_tick_tzaware(self, tz_aware_fixture, box_with_array):
  1057. # GH#21610, GH#22163 ensure DataFrame doesn't return object-dtype
  1058. tz = tz_aware_fixture
  1059. if tz == "US/Pacific":
  1060. dates = date_range("2012-11-01", periods=3, tz=tz)
  1061. offset = dates + pd.offsets.Hour(5)
  1062. assert dates[0] + pd.offsets.Hour(5) == offset[0]
  1063. dates = date_range("2010-11-01 00:00", periods=3, tz=tz, freq="H")
  1064. expected = DatetimeIndex(
  1065. ["2010-11-01 05:00", "2010-11-01 06:00", "2010-11-01 07:00"],
  1066. freq="H",
  1067. tz=tz,
  1068. )
  1069. dates = tm.box_expected(dates, box_with_array)
  1070. expected = tm.box_expected(expected, box_with_array)
  1071. for scalar in [pd.offsets.Hour(5), np.timedelta64(5, "h"), timedelta(hours=5)]:
  1072. offset = dates + scalar
  1073. tm.assert_equal(offset, expected)
  1074. offset = scalar + dates
  1075. tm.assert_equal(offset, expected)
  1076. roundtrip = offset - scalar
  1077. tm.assert_equal(roundtrip, dates)
  1078. msg = "|".join(
  1079. ["bad operand type for unary -", "cannot subtract DatetimeArray"]
  1080. )
  1081. with pytest.raises(TypeError, match=msg):
  1082. scalar - dates
  1083. # -------------------------------------------------------------
  1084. # RelativeDelta DateOffsets
  1085. def test_dt64arr_add_sub_relativedelta_offsets(self, box_with_array):
  1086. # GH#10699
  1087. vec = DatetimeIndex(
  1088. [
  1089. Timestamp("2000-01-05 00:15:00"),
  1090. Timestamp("2000-01-31 00:23:00"),
  1091. Timestamp("2000-01-01"),
  1092. Timestamp("2000-03-31"),
  1093. Timestamp("2000-02-29"),
  1094. Timestamp("2000-12-31"),
  1095. Timestamp("2000-05-15"),
  1096. Timestamp("2001-06-15"),
  1097. ]
  1098. )
  1099. vec = tm.box_expected(vec, box_with_array)
  1100. vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec
  1101. # DateOffset relativedelta fastpath
  1102. relative_kwargs = [
  1103. ("years", 2),
  1104. ("months", 5),
  1105. ("days", 3),
  1106. ("hours", 5),
  1107. ("minutes", 10),
  1108. ("seconds", 2),
  1109. ("microseconds", 5),
  1110. ]
  1111. for i, (unit, value) in enumerate(relative_kwargs):
  1112. off = DateOffset(**{unit: value})
  1113. expected = DatetimeIndex([x + off for x in vec_items])
  1114. expected = tm.box_expected(expected, box_with_array)
  1115. tm.assert_equal(expected, vec + off)
  1116. expected = DatetimeIndex([x - off for x in vec_items])
  1117. expected = tm.box_expected(expected, box_with_array)
  1118. tm.assert_equal(expected, vec - off)
  1119. off = DateOffset(**dict(relative_kwargs[: i + 1]))
  1120. expected = DatetimeIndex([x + off for x in vec_items])
  1121. expected = tm.box_expected(expected, box_with_array)
  1122. tm.assert_equal(expected, vec + off)
  1123. expected = DatetimeIndex([x - off for x in vec_items])
  1124. expected = tm.box_expected(expected, box_with_array)
  1125. tm.assert_equal(expected, vec - off)
  1126. msg = "(bad|unsupported) operand type for unary"
  1127. with pytest.raises(TypeError, match=msg):
  1128. off - vec
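# A minimal illustrative sketch of relativedelta-style DateOffset arithmetic
# above, relying on the module-level imports; the `_demo_*` name is arbitrary.
def _demo_relativedelta_dateoffset():
    dti = DatetimeIndex(["2000-01-31", "2000-03-15"])
    result = dti + DateOffset(months=1)
    # the day is clamped to the end of the shorter target month
    assert result[0] == Timestamp("2000-02-29")
    assert result[1] == Timestamp("2000-04-15")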

    # -------------------------------------------------------------
    # Non-Tick, Non-RelativeDelta DateOffsets

    # TODO: redundant with test_dt64arr_add_sub_DateOffset? that includes
    #  tz-aware cases which this does not
    @pytest.mark.parametrize(
        "cls_and_kwargs",
        [
            "YearBegin",
            ("YearBegin", {"month": 5}),
            "YearEnd",
            ("YearEnd", {"month": 5}),
            "MonthBegin",
            "MonthEnd",
            "SemiMonthEnd",
            "SemiMonthBegin",
            "Week",
            ("Week", {"weekday": 3}),
            "Week",
            ("Week", {"weekday": 6}),
            "BusinessDay",
            "BDay",
            "QuarterEnd",
            "QuarterBegin",
            "CustomBusinessDay",
            "CDay",
            "CBMonthEnd",
            "CBMonthBegin",
            "BMonthBegin",
            "BMonthEnd",
            "BusinessHour",
            "BYearBegin",
            "BYearEnd",
            "BQuarterBegin",
            ("LastWeekOfMonth", {"weekday": 2}),
            (
                "FY5253Quarter",
                {
                    "qtr_with_extra_week": 1,
                    "startingMonth": 1,
                    "weekday": 2,
                    "variation": "nearest",
                },
            ),
            ("FY5253", {"weekday": 0, "startingMonth": 2, "variation": "nearest"}),
            ("WeekOfMonth", {"weekday": 2, "week": 2}),
            "Easter",
            ("DateOffset", {"day": 4}),
            ("DateOffset", {"month": 5}),
        ],
    )
    @pytest.mark.parametrize("normalize", [True, False])
    @pytest.mark.parametrize("n", [0, 5])
    def test_dt64arr_add_sub_DateOffsets(
        self, box_with_array, n, normalize, cls_and_kwargs
    ):
        # GH#10699
        # assert vectorized operation matches pointwise operations
        if isinstance(cls_and_kwargs, tuple):
            # If cls_name param is a tuple, then 2nd entry is kwargs for
            # the offset constructor
            cls_name, kwargs = cls_and_kwargs
        else:
            cls_name = cls_and_kwargs
            kwargs = {}

        if n == 0 and cls_name in [
            "WeekOfMonth",
            "LastWeekOfMonth",
            "FY5253Quarter",
            "FY5253",
        ]:
            # passing n = 0 is invalid for these offset classes
            return

        vec = DatetimeIndex(
            [
                Timestamp("2000-01-05 00:15:00"),
                Timestamp("2000-01-31 00:23:00"),
                Timestamp("2000-01-01"),
                Timestamp("2000-03-31"),
                Timestamp("2000-02-29"),
                Timestamp("2000-12-31"),
                Timestamp("2000-05-15"),
                Timestamp("2001-06-15"),
            ]
        )
        vec = tm.box_expected(vec, box_with_array)
        vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec

        offset_cls = getattr(pd.offsets, cls_name)

        with warnings.catch_warnings(record=True):
            # pandas.errors.PerformanceWarning: Non-vectorized DateOffset being
            # applied to Series or DatetimeIndex
            # we aren't testing that here, so ignore.
            warnings.simplefilter("ignore", PerformanceWarning)

            offset = offset_cls(n, normalize=normalize, **kwargs)

            expected = DatetimeIndex([x + offset for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec + offset)

            expected = DatetimeIndex([x - offset for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec - offset)

            expected = DatetimeIndex([offset + x for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, offset + vec)

            msg = "(bad|unsupported) operand type for unary"
            with pytest.raises(TypeError, match=msg):
                offset - vec
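
        # Illustrative: non-Tick offsets such as MonthEnd are anchored to
        # calendar points and roll to the next anchor rather than adding a
        # fixed duration, e.g. roughly
        #   Timestamp("2000-02-15 10:00") + pd.offsets.MonthEnd()
        # gives Timestamp("2000-02-29 10:00"), and normalize=True additionally
        # resets the time component to midnight.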

    def test_dt64arr_add_sub_DateOffset(self, box_with_array):
        # GH#10699
        s = date_range("2000-01-01", "2000-01-31", name="a")
        s = tm.box_expected(s, box_with_array)
        result = s + DateOffset(years=1)
        result2 = DateOffset(years=1) + s
        exp = date_range("2001-01-01", "2001-01-31", name="a")._with_freq(None)
        exp = tm.box_expected(exp, box_with_array)
        tm.assert_equal(result, exp)
        tm.assert_equal(result2, exp)

        result = s - DateOffset(years=1)
        exp = date_range("1999-01-01", "1999-01-31", name="a")._with_freq(None)
        exp = tm.box_expected(exp, box_with_array)
        tm.assert_equal(result, exp)

        s = DatetimeIndex(
            [
                Timestamp("2000-01-15 00:15:00", tz="US/Central"),
                Timestamp("2000-02-15", tz="US/Central"),
            ],
            name="a",
        )
        s = tm.box_expected(s, box_with_array)
        result = s + pd.offsets.Day()
        result2 = pd.offsets.Day() + s
        exp = DatetimeIndex(
            [
                Timestamp("2000-01-16 00:15:00", tz="US/Central"),
                Timestamp("2000-02-16", tz="US/Central"),
            ],
            name="a",
        )
        exp = tm.box_expected(exp, box_with_array)
        tm.assert_equal(result, exp)
        tm.assert_equal(result2, exp)

        s = DatetimeIndex(
            [
                Timestamp("2000-01-15 00:15:00", tz="US/Central"),
                Timestamp("2000-02-15", tz="US/Central"),
            ],
            name="a",
        )
        s = tm.box_expected(s, box_with_array)
        result = s + pd.offsets.MonthEnd()
        result2 = pd.offsets.MonthEnd() + s
        exp = DatetimeIndex(
            [
                Timestamp("2000-01-31 00:15:00", tz="US/Central"),
                Timestamp("2000-02-29", tz="US/Central"),
            ],
            name="a",
        )
        exp = tm.box_expected(exp, box_with_array)
        tm.assert_equal(result, exp)
        tm.assert_equal(result2, exp)

    @pytest.mark.parametrize(
        "other",
        [
            np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)]),
            np.array([pd.offsets.DateOffset(years=1), pd.offsets.MonthEnd()]),
            np.array(  # matching offsets
                [pd.offsets.DateOffset(years=1), pd.offsets.DateOffset(years=1)]
            ),
        ],
    )
    @pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
    @pytest.mark.parametrize("box_other", [True, False])
    def test_dt64arr_add_sub_offset_array(
        self, tz_naive_fixture, box_with_array, box_other, op, other
    ):
        # GH#18849
        # GH#10699 array of offsets
        tz = tz_naive_fixture
        dti = date_range("2017-01-01", periods=2, tz=tz)
        dtarr = tm.box_expected(dti, box_with_array)

        other = np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])
        expected = DatetimeIndex([op(dti[n], other[n]) for n in range(len(dti))])
        expected = tm.box_expected(expected, box_with_array).astype(object)

        if box_other:
            other = tm.box_expected(other, box_with_array)
            if box_with_array is pd.array and op is roperator.radd:
                # We expect a PandasArray, not ndarray[object] here
                expected = pd.array(expected, dtype=object)

        with tm.assert_produces_warning(PerformanceWarning):
            res = op(dtarr, other)
        tm.assert_equal(res, expected)
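
        # Illustrative: an object-dtype array of offsets is applied elementwise
        # at the Python level, which is why a PerformanceWarning and an
        # object-dtype result are expected here, e.g. roughly
        #   date_range("2017-01-01", periods=2) + np.array([MonthEnd(), Day(n=2)])
        # gives [2017-01-31, 2017-01-04] with object dtype.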

    @pytest.mark.parametrize(
        "op, offset, exp, exp_freq",
        [
            (
                "__add__",
                DateOffset(months=3, days=10),
                [
                    Timestamp("2014-04-11"),
                    Timestamp("2015-04-11"),
                    Timestamp("2016-04-11"),
                    Timestamp("2017-04-11"),
                ],
                None,
            ),
            (
                "__add__",
                DateOffset(months=3),
                [
                    Timestamp("2014-04-01"),
                    Timestamp("2015-04-01"),
                    Timestamp("2016-04-01"),
                    Timestamp("2017-04-01"),
                ],
                "AS-APR",
            ),
            (
                "__sub__",
                DateOffset(months=3, days=10),
                [
                    Timestamp("2013-09-21"),
                    Timestamp("2014-09-21"),
                    Timestamp("2015-09-21"),
                    Timestamp("2016-09-21"),
                ],
                None,
            ),
            (
                "__sub__",
                DateOffset(months=3),
                [
                    Timestamp("2013-10-01"),
                    Timestamp("2014-10-01"),
                    Timestamp("2015-10-01"),
                    Timestamp("2016-10-01"),
                ],
                "AS-OCT",
            ),
        ],
    )
    def test_dti_add_sub_nonzero_mth_offset(
        self, op, offset, exp, exp_freq, tz_aware_fixture, box_with_array
    ):
        # GH 26258
        tz = tz_aware_fixture
        date = date_range(start="01 Jan 2014", end="01 Jan 2017", freq="AS", tz=tz)
        date = tm.box_expected(date, box_with_array, False)
        mth = getattr(date, op)
        result = mth(offset)

        expected = DatetimeIndex(exp, tz=tz)
        expected = tm.box_expected(expected, box_with_array, False)
        tm.assert_equal(result, expected)


class TestDatetime64OverflowHandling:
    # TODO: box + de-duplicate

    def test_dt64_overflow_masking(self, box_with_array):
        # GH#25317
        left = Series([Timestamp("1969-12-31")])
        right = Series([NaT])

        left = tm.box_expected(left, box_with_array)
        right = tm.box_expected(right, box_with_array)

        expected = TimedeltaIndex([NaT])
        expected = tm.box_expected(expected, box_with_array)

        result = left - right
        tm.assert_equal(result, expected)

    def test_dt64_series_arith_overflow(self):
        # GH#12534, fixed by GH#19024
        dt = Timestamp("1700-01-31")
        td = Timedelta("20000 Days")
        dti = date_range("1949-09-30", freq="100Y", periods=4)
        ser = Series(dti)
        msg = "Overflow in int64 addition"
        with pytest.raises(OverflowError, match=msg):
            ser - dt
        with pytest.raises(OverflowError, match=msg):
            dt - ser
        with pytest.raises(OverflowError, match=msg):
            ser + td
        with pytest.raises(OverflowError, match=msg):
            td + ser

        ser.iloc[-1] = NaT
        expected = Series(
            ["2004-10-03", "2104-10-04", "2204-10-04", "NaT"], dtype="datetime64[ns]"
        )
        res = ser + td
        tm.assert_series_equal(res, expected)
        res = td + ser
        tm.assert_series_equal(res, expected)

        ser.iloc[1:] = NaT
        expected = Series(["91279 Days", "NaT", "NaT", "NaT"], dtype="timedelta64[ns]")
        res = ser - dt
        tm.assert_series_equal(res, expected)
        res = dt - ser
        tm.assert_series_equal(res, -expected)
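
        # Note: datetime64[ns] values are stored as int64 nanoseconds since the
        # epoch, so the representable range is roughly 1677-09-21 to 2262-04-11;
        # results that would land outside it raise OverflowError, as above.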

    def test_datetimeindex_sub_timestamp_overflow(self):
        dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max])
        dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min])

        tsneg = Timestamp("1950-01-01").as_unit("ns")
        ts_neg_variants = [
            tsneg,
            tsneg.to_pydatetime(),
            tsneg.to_datetime64().astype("datetime64[ns]"),
            tsneg.to_datetime64().astype("datetime64[D]"),
        ]

        tspos = Timestamp("1980-01-01").as_unit("ns")
        ts_pos_variants = [
            tspos,
            tspos.to_pydatetime(),
            tspos.to_datetime64().astype("datetime64[ns]"),
            tspos.to_datetime64().astype("datetime64[D]"),
        ]

        msg = "Overflow in int64 addition"
        for variant in ts_neg_variants:
            with pytest.raises(OverflowError, match=msg):
                dtimax - variant

        expected = Timestamp.max._value - tspos._value
        for variant in ts_pos_variants:
            res = dtimax - variant
            assert res[1]._value == expected

        expected = Timestamp.min._value - tsneg._value
        for variant in ts_neg_variants:
            res = dtimin - variant
            assert res[1]._value == expected

        for variant in ts_pos_variants:
            with pytest.raises(OverflowError, match=msg):
                dtimin - variant

    def test_datetimeindex_sub_datetimeindex_overflow(self):
        # GH#22492, GH#22508
        dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max])
        dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min])

        ts_neg = pd.to_datetime(["1950-01-01", "1950-01-01"])
        ts_pos = pd.to_datetime(["1980-01-01", "1980-01-01"])

        # General tests
        expected = Timestamp.max._value - ts_pos[1]._value
        result = dtimax - ts_pos
        assert result[1]._value == expected

        expected = Timestamp.min._value - ts_neg[1]._value
        result = dtimin - ts_neg
        assert result[1]._value == expected

        msg = "Overflow in int64 addition"
        with pytest.raises(OverflowError, match=msg):
            dtimax - ts_neg
        with pytest.raises(OverflowError, match=msg):
            dtimin - ts_pos

        # Edge cases
        tmin = pd.to_datetime([Timestamp.min])
        t1 = tmin + Timedelta.max + Timedelta("1us")
        with pytest.raises(OverflowError, match=msg):
            t1 - tmin

        tmax = pd.to_datetime([Timestamp.max])
        t2 = tmax + Timedelta.min - Timedelta("1us")
        with pytest.raises(OverflowError, match=msg):
            tmax - t2


class TestTimestampSeriesArithmetic:
    def test_empty_series_add_sub(self, box_with_array):
        # GH#13844
        a = Series(dtype="M8[ns]")
        b = Series(dtype="m8[ns]")
        a = box_with_array(a)
        b = box_with_array(b)
        tm.assert_equal(a, a + b)
        tm.assert_equal(a, a - b)
        tm.assert_equal(a, b + a)

        msg = "cannot subtract"
        with pytest.raises(TypeError, match=msg):
            b - a

    def test_operators_datetimelike(self):
        # ## timedelta64 ###
        td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
        td1.iloc[2] = np.nan

        # ## datetime64 ###
        dt1 = Series(
            [
                Timestamp("20111230"),
                Timestamp("20120101"),
                Timestamp("20120103"),
            ]
        )
        dt1.iloc[2] = np.nan
        dt2 = Series(
            [
                Timestamp("20111231"),
                Timestamp("20120102"),
                Timestamp("20120104"),
            ]
        )
        dt1 - dt2
        dt2 - dt1

        # datetime64 with timedelta64
        dt1 + td1
        td1 + dt1
        dt1 - td1

        # timedelta64 with datetime64
        td1 + dt1
        dt1 + td1

    def test_dt64ser_sub_datetime_dtype(self):
        ts = Timestamp(datetime(1993, 1, 7, 13, 30, 00))
        dt = datetime(1993, 6, 22, 13, 30)
        ser = Series([ts])
        result = pd.to_timedelta(np.abs(ser - dt))
        assert result.dtype == "timedelta64[ns]"

    # -------------------------------------------------------------
    # TODO: This next block of tests came from tests.series.test_operators,
    #  needs to be de-duplicated and parametrized over `box` classes

    @pytest.mark.parametrize(
        "left, right, op_fail",
        [
            [
                [Timestamp("20111230"), Timestamp("20120101"), NaT],
                [Timestamp("20111231"), Timestamp("20120102"), Timestamp("20120104")],
                ["__sub__", "__rsub__"],
            ],
            [
                [Timestamp("20111230"), Timestamp("20120101"), NaT],
                [timedelta(minutes=5, seconds=3), timedelta(minutes=5, seconds=3), NaT],
                ["__add__", "__radd__", "__sub__"],
            ],
            [
                [
                    Timestamp("20111230", tz="US/Eastern"),
                    Timestamp("20111230", tz="US/Eastern"),
                    NaT,
                ],
                [timedelta(minutes=5, seconds=3), NaT, timedelta(minutes=5, seconds=3)],
                ["__add__", "__radd__", "__sub__"],
            ],
        ],
    )
    def test_operators_datetimelike_invalid(
        self, left, right, op_fail, all_arithmetic_operators
    ):
        # these are all TypeError ops
        op_str = all_arithmetic_operators
        arg1 = Series(left)
        arg2 = Series(right)
        # check that we are getting a TypeError
        # with 'operate' (from core/ops.py) for the ops that are not
        # defined
        op = getattr(arg1, op_str, None)
        # Previously, _validate_for_numeric_binop in core/indexes/base.py
        # did this for us.
        if op_str not in op_fail:
            with pytest.raises(
                TypeError, match="operate|[cC]annot|unsupported operand"
            ):
                op(arg2)
        else:
            # Smoke test
            op(arg2)

    def test_sub_single_tz(self):
        # GH#12290
        s1 = Series([Timestamp("2016-02-10", tz="America/Sao_Paulo")])
        s2 = Series([Timestamp("2016-02-08", tz="America/Sao_Paulo")])
        result = s1 - s2
        expected = Series([Timedelta("2days")])
        tm.assert_series_equal(result, expected)

        result = s2 - s1
        expected = Series([Timedelta("-2days")])
        tm.assert_series_equal(result, expected)

    def test_dt64tz_series_sub_dtitz(self):
        # GH#19071 subtracting tzaware DatetimeIndex from tzaware Series
        # (with same tz) raises, fixed by #19024
        dti = date_range("1999-09-30", periods=10, tz="US/Pacific")
        ser = Series(dti)
        expected = Series(TimedeltaIndex(["0days"] * 10))

        res = dti - ser
        tm.assert_series_equal(res, expected)
        res = ser - dti
        tm.assert_series_equal(res, expected)

    def test_sub_datetime_compat(self):
        # see GH#14088
        s = Series([datetime(2016, 8, 23, 12, tzinfo=pytz.utc), NaT])
        dt = datetime(2016, 8, 22, 12, tzinfo=pytz.utc)
        exp = Series([Timedelta("1 days"), NaT])
        tm.assert_series_equal(s - dt, exp)
        tm.assert_series_equal(s - Timestamp(dt), exp)

    def test_dt64_series_add_mixed_tick_DateOffset(self):
        # GH#4532
        # operate with pd.offsets
        s = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])

        result = s + pd.offsets.Milli(5)
        result2 = pd.offsets.Milli(5) + s
        expected = Series(
            [Timestamp("20130101 9:01:00.005"), Timestamp("20130101 9:02:00.005")]
        )
        tm.assert_series_equal(result, expected)
        tm.assert_series_equal(result2, expected)

        result = s + pd.offsets.Minute(5) + pd.offsets.Milli(5)
        expected = Series(
            [Timestamp("20130101 9:06:00.005"), Timestamp("20130101 9:07:00.005")]
        )
        tm.assert_series_equal(result, expected)

    def test_datetime64_ops_nat(self):
        # GH#11349
        datetime_series = Series([NaT, Timestamp("19900315")])
        nat_series_dtype_timestamp = Series([NaT, NaT], dtype="datetime64[ns]")
        single_nat_dtype_datetime = Series([NaT], dtype="datetime64[ns]")

        # subtraction
        tm.assert_series_equal(-NaT + datetime_series, nat_series_dtype_timestamp)
        msg = "bad operand type for unary -: 'DatetimeArray'"
        with pytest.raises(TypeError, match=msg):
            -single_nat_dtype_datetime + datetime_series

        tm.assert_series_equal(
            -NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
        )
        with pytest.raises(TypeError, match=msg):
            -single_nat_dtype_datetime + nat_series_dtype_timestamp

        # addition
        tm.assert_series_equal(
            nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
        )
        tm.assert_series_equal(
            NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
        )

        tm.assert_series_equal(
            nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
        )
        tm.assert_series_equal(
            NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
        )

    # -------------------------------------------------------------
    # Timezone-Centric Tests

    def test_operators_datetimelike_with_timezones(self):
        tz = "US/Eastern"
        dt1 = Series(date_range("2000-01-01 09:00:00", periods=5, tz=tz), name="foo")
        dt2 = dt1.copy()
        dt2.iloc[2] = np.nan

        td1 = Series(pd.timedelta_range("1 days 1 min", periods=5, freq="H"))
        td2 = td1.copy()
        td2.iloc[1] = np.nan
        assert td2._values.freq is None

        result = dt1 + td1[0]
        exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
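
        # Note: tz-aware addition operates on the underlying UTC values; the
        # strip-tz / add / re-localize construction used for `exp` here matches
        # only because these January dates do not cross a DST transition.
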
        result = dt2 + td2[0]
        exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        # odd numpy behavior with scalar timedeltas
        result = td1[0] + dt1
        exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = td2[0] + dt2
        exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt1 - td1[0]
        exp = (dt1.dt.tz_localize(None) - td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
        msg = "(bad|unsupported) operand type for unary"
        with pytest.raises(TypeError, match=msg):
            td1[0] - dt1

        result = dt2 - td2[0]
        exp = (dt2.dt.tz_localize(None) - td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
        with pytest.raises(TypeError, match=msg):
            td2[0] - dt2

        result = dt1 + td1
        exp = (dt1.dt.tz_localize(None) + td1).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 + td2
        exp = (dt2.dt.tz_localize(None) + td2).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt1 - td1
        exp = (dt1.dt.tz_localize(None) - td1).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 - td2
        exp = (dt2.dt.tz_localize(None) - td2).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        msg = "cannot (add|subtract)"
        with pytest.raises(TypeError, match=msg):
            td1 - dt1
        with pytest.raises(TypeError, match=msg):
            td2 - dt2


class TestDatetimeIndexArithmetic:
    # -------------------------------------------------------------
    # Binary operations DatetimeIndex and TimedeltaIndex/array

    def test_dti_add_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        tdi = pd.timedelta_range("0 days", periods=10)
        expected = date_range("2017-01-01", periods=10, tz=tz)
        expected = expected._with_freq(None)

        # add with TimedeltaIndex
        result = dti + tdi
        tm.assert_index_equal(result, expected)

        result = tdi + dti
        tm.assert_index_equal(result, expected)

        # add with timedelta64 array
        result = dti + tdi.values
        tm.assert_index_equal(result, expected)

        result = tdi.values + dti
        tm.assert_index_equal(result, expected)

    def test_dti_iadd_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        tdi = pd.timedelta_range("0 days", periods=10)
        expected = date_range("2017-01-01", periods=10, tz=tz)
        expected = expected._with_freq(None)

        # iadd with TimedeltaIndex
        result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        result += tdi
        tm.assert_index_equal(result, expected)

        result = pd.timedelta_range("0 days", periods=10)
        result += dti
        tm.assert_index_equal(result, expected)

        # iadd with timedelta64 array
        result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        result += tdi.values
        tm.assert_index_equal(result, expected)

        result = pd.timedelta_range("0 days", periods=10)
        result += dti
        tm.assert_index_equal(result, expected)

    def test_dti_sub_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        tdi = pd.timedelta_range("0 days", periods=10)
        expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D")
        expected = expected._with_freq(None)

        # sub with TimedeltaIndex
        result = dti - tdi
        tm.assert_index_equal(result, expected)

        msg = "cannot subtract .*TimedeltaArray"
        with pytest.raises(TypeError, match=msg):
            tdi - dti

        # sub with timedelta64 array
        result = dti - tdi.values
        tm.assert_index_equal(result, expected)

        msg = "cannot subtract a datelike from a TimedeltaArray"
        with pytest.raises(TypeError, match=msg):
            tdi.values - dti
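
        # Note: the asymmetry is intentional: datetime64 - timedelta64 is
        # well-defined (another datetime64), but timedelta64 - datetime64 is
        # not, so the reversed subtraction raises TypeError.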

    def test_dti_isub_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        tdi = pd.timedelta_range("0 days", periods=10)
        expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D")
        expected = expected._with_freq(None)

        # isub with TimedeltaIndex
        result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        result -= tdi
        tm.assert_index_equal(result, expected)

        # DTA.__isub__ GH#43904
        dta = dti._data.copy()
        dta -= tdi
        tm.assert_datetime_array_equal(dta, expected._data)

        out = dti._data.copy()
        np.subtract(out, tdi, out=out)
        tm.assert_datetime_array_equal(out, expected._data)

        msg = "cannot subtract a datelike from a TimedeltaArray"
        with pytest.raises(TypeError, match=msg):
            tdi -= dti

        # isub with timedelta64 array
        result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        result -= tdi.values
        tm.assert_index_equal(result, expected)

        with pytest.raises(TypeError, match=msg):
            tdi.values -= dti

        with pytest.raises(TypeError, match=msg):
            tdi._values -= dti

    # -------------------------------------------------------------
    # Binary Operations DatetimeIndex and datetime-like
    # TODO: A couple other tests belong in this section. Move them in
    #  a PR where there isn't already a giant diff.

    # -------------------------------------------------------------

    def test_dta_add_sub_index(self, tz_naive_fixture):
        # Check that DatetimeArray defers to Index classes
        dti = date_range("20130101", periods=3, tz=tz_naive_fixture)
        dta = dti.array
        result = dta - dti
        expected = dti - dti
        tm.assert_index_equal(result, expected)

        tdi = result
        result = dta + tdi
        expected = dti + tdi
        tm.assert_index_equal(result, expected)

        result = dta - tdi
        expected = dti - tdi
        tm.assert_index_equal(result, expected)

    def test_sub_dti_dti(self):
        # previously performed setop (deprecated in 0.16.0), now changed to
        # return subtraction -> TimeDeltaIndex (GH ...)
        dti = date_range("20130101", periods=3)
        dti_tz = date_range("20130101", periods=3).tz_localize("US/Eastern")
        expected = TimedeltaIndex([0, 0, 0])

        result = dti - dti
        tm.assert_index_equal(result, expected)

        result = dti_tz - dti_tz
        tm.assert_index_equal(result, expected)

        msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
        with pytest.raises(TypeError, match=msg):
            dti_tz - dti

        with pytest.raises(TypeError, match=msg):
            dti - dti_tz

        # isub
        dti -= dti
        tm.assert_index_equal(dti, expected)

        # different length raises ValueError
        dti1 = date_range("20130101", periods=3)
        dti2 = date_range("20130101", periods=4)
        msg = "cannot add indices of unequal length"
        with pytest.raises(ValueError, match=msg):
            dti1 - dti2

        # NaN propagation
        dti1 = DatetimeIndex(["2012-01-01", np.nan, "2012-01-03"])
        dti2 = DatetimeIndex(["2012-01-02", "2012-01-03", np.nan])
        expected = TimedeltaIndex(["1 days", np.nan, np.nan])
        result = dti2 - dti1
        tm.assert_index_equal(result, expected)

    # -------------------------------------------------------------------
    # TODO: Most of this block is moved from series or frame tests, needs
    #  cleanup, box-parametrization, and de-duplication

    @pytest.mark.parametrize("op", [operator.add, operator.sub])
    def test_timedelta64_equal_timedelta_supported_ops(self, op, box_with_array):
        ser = Series(
            [
                Timestamp("20130301"),
                Timestamp("20130228 23:00:00"),
                Timestamp("20130228 22:00:00"),
                Timestamp("20130228 21:00:00"),
            ]
        )
        obj = box_with_array(ser)

        intervals = ["D", "h", "m", "s", "us"]

        def timedelta64(*args):
            # see casting notes in NumPy gh-12927
            return np.sum(list(starmap(np.timedelta64, zip(args, intervals))))

        for d, h, m, s, us in product(*([range(2)] * 5)):
            nptd = timedelta64(d, h, m, s, us)
            pytd = timedelta(days=d, hours=h, minutes=m, seconds=s, microseconds=us)
            lhs = op(obj, nptd)
            rhs = op(obj, pytd)

            tm.assert_equal(lhs, rhs)

    def test_ops_nat_mixed_datetime64_timedelta64(self):
        # GH#11349
        timedelta_series = Series([NaT, Timedelta("1s")])
        datetime_series = Series([NaT, Timestamp("19900315")])
        nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[ns]")
        nat_series_dtype_timestamp = Series([NaT, NaT], dtype="datetime64[ns]")
        single_nat_dtype_datetime = Series([NaT], dtype="datetime64[ns]")
        single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[ns]")

        # subtraction
        tm.assert_series_equal(
            datetime_series - single_nat_dtype_datetime, nat_series_dtype_timedelta
        )

        tm.assert_series_equal(
            datetime_series - single_nat_dtype_timedelta, nat_series_dtype_timestamp
        )
        tm.assert_series_equal(
            -single_nat_dtype_timedelta + datetime_series, nat_series_dtype_timestamp
        )

        # without a Series wrapping the NaT, it is ambiguous
        # whether it is a datetime64 or timedelta64
        # defaults to interpreting it as timedelta64
        tm.assert_series_equal(
            nat_series_dtype_timestamp - single_nat_dtype_datetime,
            nat_series_dtype_timedelta,
        )

        tm.assert_series_equal(
            nat_series_dtype_timestamp - single_nat_dtype_timedelta,
            nat_series_dtype_timestamp,
        )
        tm.assert_series_equal(
            -single_nat_dtype_timedelta + nat_series_dtype_timestamp,
            nat_series_dtype_timestamp,
        )
        msg = "cannot subtract a datelike"
        with pytest.raises(TypeError, match=msg):
            timedelta_series - single_nat_dtype_datetime

        # addition
        tm.assert_series_equal(
            nat_series_dtype_timestamp + single_nat_dtype_timedelta,
            nat_series_dtype_timestamp,
        )
        tm.assert_series_equal(
            single_nat_dtype_timedelta + nat_series_dtype_timestamp,
            nat_series_dtype_timestamp,
        )

        tm.assert_series_equal(
            nat_series_dtype_timestamp + single_nat_dtype_timedelta,
            nat_series_dtype_timestamp,
        )
        tm.assert_series_equal(
            single_nat_dtype_timedelta + nat_series_dtype_timestamp,
            nat_series_dtype_timestamp,
        )

        tm.assert_series_equal(
            nat_series_dtype_timedelta + single_nat_dtype_datetime,
            nat_series_dtype_timestamp,
        )
        tm.assert_series_equal(
            single_nat_dtype_datetime + nat_series_dtype_timedelta,
            nat_series_dtype_timestamp,
        )

    def test_ufunc_coercions(self):
        idx = date_range("2011-01-01", periods=3, freq="2D", name="x")

        delta = np.timedelta64(1, "D")
        exp = date_range("2011-01-02", periods=3, freq="2D", name="x")
        for result in [idx + delta, np.add(idx, delta)]:
            assert isinstance(result, DatetimeIndex)
            tm.assert_index_equal(result, exp)
            assert result.freq == "2D"

        exp = date_range("2010-12-31", periods=3, freq="2D", name="x")
        for result in [idx - delta, np.subtract(idx, delta)]:
            assert isinstance(result, DatetimeIndex)
            tm.assert_index_equal(result, exp)
            assert result.freq == "2D"

        # When adding/subtracting an ndarray (which has no .freq), the result
        # does not infer freq
        idx = idx._with_freq(None)
        delta = np.array(
            [np.timedelta64(1, "D"), np.timedelta64(2, "D"), np.timedelta64(3, "D")]
        )
        exp = DatetimeIndex(["2011-01-02", "2011-01-05", "2011-01-08"], name="x")
        for result in [idx + delta, np.add(idx, delta)]:
            tm.assert_index_equal(result, exp)
            assert result.freq == exp.freq

        exp = DatetimeIndex(["2010-12-31", "2011-01-01", "2011-01-02"], name="x")
        for result in [idx - delta, np.subtract(idx, delta)]:
            assert isinstance(result, DatetimeIndex)
            tm.assert_index_equal(result, exp)
            assert result.freq == exp.freq
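
        # Note: np.add/np.subtract defer to the pandas object via
        # __array_ufunc__, so the ufunc and operator forms above are expected
        # to agree; freq is only retained when the other operand is a single
        # fixed-duration scalar, not an ndarray of timedeltas.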

    def test_dti_add_series(self, tz_naive_fixture, names):
        # GH#13905
        tz = tz_naive_fixture
        index = DatetimeIndex(
            ["2016-06-28 05:30", "2016-06-28 05:31"], tz=tz, name=names[0]
        )
        ser = Series([Timedelta(seconds=5)] * 2, index=index, name=names[1])
        expected = Series(index + Timedelta(seconds=5), index=index, name=names[2])

        # passing name arg isn't enough when names[2] is None
        expected.name = names[2]
        assert expected.dtype == index.dtype
        result = ser + index
        tm.assert_series_equal(result, expected)

        result2 = index + ser
        tm.assert_series_equal(result2, expected)

        expected = index + Timedelta(seconds=5)
        result3 = ser.values + index
        tm.assert_index_equal(result3, expected)

        result4 = index + ser.values
        tm.assert_index_equal(result4, expected)

    @pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
    def test_dti_addsub_offset_arraylike(
        self, tz_naive_fixture, names, op, index_or_series
    ):
        # GH#18849, GH#19744
        other_box = index_or_series

        tz = tz_naive_fixture
        dti = date_range("2017-01-01", periods=2, tz=tz, name=names[0])
        other = other_box([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])

        xbox = get_upcast_box(dti, other)

        with tm.assert_produces_warning(PerformanceWarning):
            res = op(dti, other)

        expected = DatetimeIndex(
            [op(dti[n], other[n]) for n in range(len(dti))], name=names[2], freq="infer"
        )
        expected = tm.box_expected(expected, xbox).astype(object)
        tm.assert_equal(res, expected)

    @pytest.mark.parametrize("other_box", [pd.Index, np.array])
    def test_dti_addsub_object_arraylike(
        self, tz_naive_fixture, box_with_array, other_box
    ):
        tz = tz_naive_fixture

        dti = date_range("2017-01-01", periods=2, tz=tz)
        dtarr = tm.box_expected(dti, box_with_array)
        other = other_box([pd.offsets.MonthEnd(), Timedelta(days=4)])
        xbox = get_upcast_box(dtarr, other)

        expected = DatetimeIndex(["2017-01-31", "2017-01-06"], tz=tz_naive_fixture)
        expected = tm.box_expected(expected, xbox).astype(object)

        with tm.assert_produces_warning(PerformanceWarning):
            result = dtarr + other
        tm.assert_equal(result, expected)

        expected = DatetimeIndex(["2016-12-31", "2016-12-29"], tz=tz_naive_fixture)
        expected = tm.box_expected(expected, xbox).astype(object)

        with tm.assert_produces_warning(PerformanceWarning):
            result = dtarr - other
        tm.assert_equal(result, expected)


@pytest.mark.parametrize("years", [-1, 0, 1])
@pytest.mark.parametrize("months", [-2, 0, 2])
def test_shift_months(years, months):
    dti = DatetimeIndex(
        [
            Timestamp("2000-01-05 00:15:00"),
            Timestamp("2000-01-31 00:23:00"),
            Timestamp("2000-01-01"),
            Timestamp("2000-02-29"),
            Timestamp("2000-12-31"),
        ]
    )
    actual = DatetimeIndex(shift_months(dti.asi8, years * 12 + months))

    raw = [x + pd.offsets.DateOffset(years=years, months=months) for x in dti]
    expected = DatetimeIndex(raw)
    tm.assert_index_equal(actual, expected)
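
# Note: shift_months operates directly on the underlying int64 values and
# clamps to the last valid day of the target month (e.g. Jan 31 + 1 month is
# Feb 29 in 2000), which is what the pointwise DateOffset comparison above
# checks.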


def test_dt64arr_addsub_object_dtype_2d():
    # block-wise DataFrame operations will require operating on 2D
    # DatetimeArray/TimedeltaArray, so check that specifically.
    dti = date_range("1994-02-13", freq="2W", periods=4)
    dta = dti._data.reshape((4, 1))

    other = np.array([[pd.offsets.Day(n)] for n in range(4)])
    assert other.shape == dta.shape

    with tm.assert_produces_warning(PerformanceWarning):
        result = dta + other
    with tm.assert_produces_warning(PerformanceWarning):
        expected = (dta[:, 0] + other[:, 0]).reshape(-1, 1)

    tm.assert_numpy_array_equal(result, expected)

    with tm.assert_produces_warning(PerformanceWarning):
        # Case where we expect to get a TimedeltaArray back
        result2 = dta - dta.astype(object)

    assert result2.shape == (4, 1)
    assert all(td._value == 0 for td in result2.ravel())


def test_non_nano_dt64_addsub_np_nat_scalars():
    # GH 52295
    ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
    result = ser - np.datetime64("nat", "ms")
    expected = Series([NaT] * 3, dtype="timedelta64[ms]")
    tm.assert_series_equal(result, expected)

    result = ser + np.timedelta64("nat", "ms")
    expected = Series([NaT] * 3, dtype="datetime64[ms]")
    tm.assert_series_equal(result, expected)


def test_non_nano_dt64_addsub_np_nat_scalars_unitless():
    # GH 52295
    # TODO: Can we default to the ser unit?
    ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
    result = ser - np.datetime64("nat")
    expected = Series([NaT] * 3, dtype="timedelta64[ns]")
    tm.assert_series_equal(result, expected)

    result = ser + np.timedelta64("nat")
    expected = Series([NaT] * 3, dtype="datetime64[ns]")
    tm.assert_series_equal(result, expected)


def test_non_nano_dt64_addsub_np_nat_scalars_unsupported_unit():
    # GH 52295
    ser = Series([12332, 23243, 33243], dtype="datetime64[s]")
    result = ser - np.datetime64("nat", "D")
    expected = Series([NaT] * 3, dtype="timedelta64[s]")
    tm.assert_series_equal(result, expected)

    result = ser + np.timedelta64("nat", "D")
    expected = Series([NaT] * 3, dtype="datetime64[s]")
    tm.assert_series_equal(result, expected)
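
# Note: in the non-nano cases above the result keeps the Series' unit when the
# NaT scalar has a matching or unsupported unit (e.g. "D"), while a unit-less
# np.datetime64("nat") / np.timedelta64("nat") currently upcasts the result to
# nanoseconds (see the TODO above).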