# -*- coding: utf-8 -*-
# Arithmetic tests for DataFrame/Series/Index/Array classes that should
# behave identically.
# Specifically for datetime64 and datetime64tz dtypes
from datetime import datetime, timedelta
from itertools import product, starmap
import operator
import warnings

import numpy as np
import pytest
import pytz

from pandas._libs.tslibs.conversion import localize_pydatetime
from pandas._libs.tslibs.offsets import shift_months
from pandas.compat.numpy import np_datetime64_compat
from pandas.errors import NullFrequencyError, PerformanceWarning

import pandas as pd
from pandas import (
    DatetimeIndex, NaT, Period, Series, Timedelta, TimedeltaIndex, Timestamp,
    date_range)
from pandas.core.indexes.datetimes import _to_M8
import pandas.util.testing as tm
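
# The ``box_with_array``, ``box_with_array2``, ``two_hours``,
# ``tz_naive_fixture`` and ``tz_aware_fixture`` arguments used throughout this
# module are assumed to be pytest fixtures supplied by the shared arithmetic
# conftest (parametrizing over DataFrame/Series/Index/array boxes and over
# timezones); they are not defined in this file.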


def assert_all(obj):
    """
    Test helper to call obj.all() the appropriate number of times on
    a Series or DataFrame.
    """
    if isinstance(obj, pd.DataFrame):
        assert obj.all().all()
    else:
        assert obj.all()
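# For example, ``assert_all(pd.Series([True, True]))`` reduces to a single
# ``obj.all()`` call, while ``assert_all(pd.DataFrame({'a': [True, True]}))``
# needs ``obj.all().all()`` because DataFrame.all() returns a Series.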


# ------------------------------------------------------------------
# Comparisons

class TestDatetime64DataFrameComparison(object):
    @pytest.mark.parametrize('timestamps', [
        [pd.Timestamp('2012-01-01 13:00:00+00:00')] * 2,
        [pd.Timestamp('2012-01-01 13:00:00')] * 2])
    def test_tz_aware_scalar_comparison(self, timestamps):
        # GH#15966
        df = pd.DataFrame({'test': timestamps})
        expected = pd.DataFrame({'test': [False, False]})
        tm.assert_frame_equal(df == -1, expected)

    def test_dt64_nat_comparison(self):
        # GH#22242, GH#22163 DataFrame considered NaT == ts incorrectly
        ts = pd.Timestamp.now()
        df = pd.DataFrame([ts, pd.NaT])
        expected = pd.DataFrame([True, False])
        result = df == ts
        tm.assert_frame_equal(result, expected)


class TestDatetime64SeriesComparison(object):
    # TODO: moved from tests.series.test_operators; needs cleanup
    @pytest.mark.parametrize('pair', [
        ([pd.Timestamp('2011-01-01'), NaT, pd.Timestamp('2011-01-03')],
         [NaT, NaT, pd.Timestamp('2011-01-03')]),
        ([pd.Timedelta('1 days'), NaT, pd.Timedelta('3 days')],
         [NaT, NaT, pd.Timedelta('3 days')]),
        ([pd.Period('2011-01', freq='M'), NaT,
          pd.Period('2011-03', freq='M')],
         [NaT, NaT, pd.Period('2011-03', freq='M')]),
    ])
    @pytest.mark.parametrize('reverse', [True, False])
    @pytest.mark.parametrize('box', [Series, pd.Index])
    @pytest.mark.parametrize('dtype', [None, object])
    def test_nat_comparisons(self, dtype, box, reverse, pair):
        l, r = pair
        if reverse:
            # add lhs / rhs switched data
            l, r = r, l

        left = Series(l, dtype=dtype)
        right = box(r, dtype=dtype)

        # Series, Index
        expected = Series([False, False, True])
        tm.assert_series_equal(left == right, expected)

        expected = Series([True, True, False])
        tm.assert_series_equal(left != right, expected)

        expected = Series([False, False, False])
        tm.assert_series_equal(left < right, expected)

        expected = Series([False, False, False])
        tm.assert_series_equal(left > right, expected)

        expected = Series([False, False, True])
        tm.assert_series_equal(left >= right, expected)

        expected = Series([False, False, True])
        tm.assert_series_equal(left <= right, expected)

    def test_comparison_invalid(self, box_with_array):
        # GH#4968
        # invalid date/int comparisons
        xbox = box_with_array if box_with_array is not pd.Index else np.ndarray

        ser = Series(range(5))
        ser2 = Series(pd.date_range('20010101', periods=5))

        ser = tm.box_expected(ser, box_with_array)
        ser2 = tm.box_expected(ser2, box_with_array)

        for (x, y) in [(ser, ser2), (ser2, ser)]:
            result = x == y
            expected = tm.box_expected([False] * 5, xbox)
            tm.assert_equal(result, expected)

            result = x != y
            expected = tm.box_expected([True] * 5, xbox)
            tm.assert_equal(result, expected)

            with pytest.raises(TypeError):
                x >= y
            with pytest.raises(TypeError):
                x > y
            with pytest.raises(TypeError):
                x < y
            with pytest.raises(TypeError):
                x <= y

    @pytest.mark.parametrize('data', [
        [Timestamp('2011-01-01'), NaT, Timestamp('2011-01-03')],
        [Timedelta('1 days'), NaT, Timedelta('3 days')],
        [Period('2011-01', freq='M'), NaT, Period('2011-03', freq='M')]
    ])
    @pytest.mark.parametrize('dtype', [None, object])
    def test_nat_comparisons_scalar(self, dtype, data, box_with_array):
        if box_with_array is tm.to_array and dtype is object:
            # don't bother testing ndarray comparison methods as this fails
            # on older numpys (since they check object identity)
            return

        xbox = box_with_array if box_with_array is not pd.Index else np.ndarray

        left = Series(data, dtype=dtype)
        left = tm.box_expected(left, box_with_array)

        expected = [False, False, False]
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(left == NaT, expected)
        tm.assert_equal(NaT == left, expected)

        expected = [True, True, True]
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(left != NaT, expected)
        tm.assert_equal(NaT != left, expected)

        expected = [False, False, False]
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(left < NaT, expected)
        tm.assert_equal(NaT > left, expected)
        tm.assert_equal(left <= NaT, expected)
        tm.assert_equal(NaT >= left, expected)

        tm.assert_equal(left > NaT, expected)
        tm.assert_equal(NaT < left, expected)
        tm.assert_equal(left >= NaT, expected)
        tm.assert_equal(NaT <= left, expected)

    def test_series_comparison_scalars(self):
        series = Series(date_range('1/1/2000', periods=10))
        val = datetime(2000, 1, 4)
        result = series > val
        expected = Series([x > val for x in series])
        tm.assert_series_equal(result, expected)

        val = series[5]
        result = series > val
        expected = Series([x > val for x in series])
        tm.assert_series_equal(result, expected)

    def test_dt64_ser_cmp_date_warning(self):
        # https://github.com/pandas-dev/pandas/issues/21359
        # Remove this test and enable invalid test below
        ser = pd.Series(pd.date_range('20010101', periods=10), name='dates')
        date = ser.iloc[0].to_pydatetime().date()

        with tm.assert_produces_warning(FutureWarning) as m:
            result = ser == date
        expected = pd.Series([True] + [False] * 9, name='dates')
        tm.assert_series_equal(result, expected)
        assert "Comparing Series of datetimes " in str(m[0].message)
        assert "will not compare equal" in str(m[0].message)

        with tm.assert_produces_warning(FutureWarning) as m:
            result = ser != date
        tm.assert_series_equal(result, ~expected)
        assert "will not compare equal" in str(m[0].message)

        with tm.assert_produces_warning(FutureWarning) as m:
            result = ser <= date
        tm.assert_series_equal(result, expected)
        assert "a TypeError will be raised" in str(m[0].message)

        with tm.assert_produces_warning(FutureWarning) as m:
            result = ser < date
        tm.assert_series_equal(result, pd.Series([False] * 10, name='dates'))
        assert "a TypeError will be raised" in str(m[0].message)

        with tm.assert_produces_warning(FutureWarning) as m:
            result = ser >= date
        tm.assert_series_equal(result, pd.Series([True] * 10, name='dates'))
        assert "a TypeError will be raised" in str(m[0].message)

        with tm.assert_produces_warning(FutureWarning) as m:
            result = ser > date
        tm.assert_series_equal(result, pd.Series([False] + [True] * 9,
                                                 name='dates'))
        assert "a TypeError will be raised" in str(m[0].message)

    @pytest.mark.skip(reason="GH#21359")
    def test_dt64ser_cmp_date_invalid(self, box_with_array):
        # GH#19800 datetime.date comparison raises to
        # match DatetimeIndex/Timestamp. This also matches the behavior
        # of stdlib datetime.datetime
        ser = pd.date_range('20010101', periods=10)
        date = ser.iloc[0].to_pydatetime().date()

        ser = tm.box_expected(ser, box_with_array)
        assert not (ser == date).any()
        assert (ser != date).all()
        with pytest.raises(TypeError):
            ser > date
        with pytest.raises(TypeError):
            ser < date
        with pytest.raises(TypeError):
            ser >= date
        with pytest.raises(TypeError):
            ser <= date
  198. @pytest.mark.parametrize("left,right", [
  199. ("lt", "gt"),
  200. ("le", "ge"),
  201. ("eq", "eq"),
  202. ("ne", "ne"),
  203. ])
  204. def test_timestamp_compare_series(self, left, right):
  205. # see gh-4982
  206. # Make sure we can compare Timestamps on the right AND left hand side.
  207. ser = pd.Series(pd.date_range("20010101", periods=10), name="dates")
  208. s_nat = ser.copy(deep=True)
  209. ser[0] = pd.Timestamp("nat")
  210. ser[3] = pd.Timestamp("nat")
  211. left_f = getattr(operator, left)
  212. right_f = getattr(operator, right)
  213. # No NaT
  214. expected = left_f(ser, pd.Timestamp("20010109"))
  215. result = right_f(pd.Timestamp("20010109"), ser)
  216. tm.assert_series_equal(result, expected)
  217. # NaT
  218. expected = left_f(ser, pd.Timestamp("nat"))
  219. result = right_f(pd.Timestamp("nat"), ser)
  220. tm.assert_series_equal(result, expected)
  221. # Compare to Timestamp with series containing NaT
  222. expected = left_f(s_nat, pd.Timestamp("20010109"))
  223. result = right_f(pd.Timestamp("20010109"), s_nat)
  224. tm.assert_series_equal(result, expected)
  225. # Compare to NaT with series containing NaT
  226. expected = left_f(s_nat, pd.Timestamp("nat"))
  227. result = right_f(pd.Timestamp("nat"), s_nat)
  228. tm.assert_series_equal(result, expected)

    def test_dt64arr_timestamp_equality(self, box_with_array):
        # GH#11034
        xbox = box_with_array if box_with_array is not pd.Index else np.ndarray

        ser = pd.Series([pd.Timestamp('2000-01-29 01:59:00'), 'NaT'])
        ser = tm.box_expected(ser, box_with_array)

        result = ser != ser
        expected = tm.box_expected([False, True], xbox)
        tm.assert_equal(result, expected)

        result = ser != ser[0]
        expected = tm.box_expected([False, True], xbox)
        tm.assert_equal(result, expected)

        result = ser != ser[1]
        expected = tm.box_expected([True, True], xbox)
        tm.assert_equal(result, expected)

        result = ser == ser
        expected = tm.box_expected([True, False], xbox)
        tm.assert_equal(result, expected)

        result = ser == ser[0]
        expected = tm.box_expected([True, False], xbox)
        tm.assert_equal(result, expected)

        result = ser == ser[1]
        expected = tm.box_expected([False, False], xbox)
        tm.assert_equal(result, expected)

    @pytest.mark.parametrize('op', [operator.eq, operator.ne,
                                    operator.gt, operator.ge,
                                    operator.lt, operator.le])
    def test_comparison_tzawareness_compat(self, op):
        # GH#18162
        dr = pd.date_range('2016-01-01', periods=6)
        dz = dr.tz_localize('US/Pacific')

        # Check that there isn't a problem: aware-aware and naive-naive do not
        # raise
        naive_series = Series(dr)
        aware_series = Series(dz)
        with pytest.raises(TypeError):
            op(dz, naive_series)
        with pytest.raises(TypeError):
            op(dr, aware_series)

        # TODO: implement _assert_tzawareness_compat for the reverse
        # comparison with the Series on the left-hand side


class TestDatetimeIndexComparisons(object):
    # TODO: moved from tests.indexes.test_base; parametrize and de-duplicate
    @pytest.mark.parametrize("op", [
        operator.eq, operator.ne, operator.gt, operator.lt,
        operator.ge, operator.le
    ])
    def test_comparators(self, op):
        index = tm.makeDateIndex(100)
        element = index[len(index) // 2]
        element = _to_M8(element)

        arr = np.array(index)
        arr_result = op(arr, element)
        index_result = op(index, element)

        assert isinstance(index_result, np.ndarray)
        tm.assert_numpy_array_equal(arr_result, index_result)

    @pytest.mark.parametrize('other', [datetime(2016, 1, 1),
                                       Timestamp('2016-01-01'),
                                       np.datetime64('2016-01-01')])
    def test_dti_cmp_datetimelike(self, other, tz_naive_fixture):
        tz = tz_naive_fixture
        dti = pd.date_range('2016-01-01', periods=2, tz=tz)
        if tz is not None:
            if isinstance(other, np.datetime64):
                # no tzaware version available
                return
            other = localize_pydatetime(other, dti.tzinfo)

        result = dti == other
        expected = np.array([True, False])
        tm.assert_numpy_array_equal(result, expected)

        result = dti > other
        expected = np.array([False, True])
        tm.assert_numpy_array_equal(result, expected)

        result = dti >= other
        expected = np.array([True, True])
        tm.assert_numpy_array_equal(result, expected)

        result = dti < other
        expected = np.array([False, False])
        tm.assert_numpy_array_equal(result, expected)

        result = dti <= other
        expected = np.array([True, False])
        tm.assert_numpy_array_equal(result, expected)

    def dt64arr_cmp_non_datetime(self, tz_naive_fixture, box_with_array):
        # GH#19301 by convention datetime.date is not considered comparable
        # to Timestamp or DatetimeIndex. This may change in the future.
        tz = tz_naive_fixture
        dti = pd.date_range('2016-01-01', periods=2, tz=tz)
        dtarr = tm.box_expected(dti, box_with_array)

        other = datetime(2016, 1, 1).date()
        assert not (dtarr == other).any()
        assert (dtarr != other).all()
        with pytest.raises(TypeError):
            dtarr < other
        with pytest.raises(TypeError):
            dtarr <= other
        with pytest.raises(TypeError):
            dtarr > other
        with pytest.raises(TypeError):
            dtarr >= other

    @pytest.mark.parametrize('other', [None, np.nan, pd.NaT])
    def test_dti_eq_null_scalar(self, other, tz_naive_fixture):
        # GH#19301
        tz = tz_naive_fixture
        dti = pd.date_range('2016-01-01', periods=2, tz=tz)
        assert not (dti == other).any()

    @pytest.mark.parametrize('other', [None, np.nan, pd.NaT])
    def test_dti_ne_null_scalar(self, other, tz_naive_fixture):
        # GH#19301
        tz = tz_naive_fixture
        dti = pd.date_range('2016-01-01', periods=2, tz=tz)
        assert (dti != other).all()

    @pytest.mark.parametrize('other', [None, np.nan])
    def test_dti_cmp_null_scalar_inequality(self, tz_naive_fixture, other,
                                            box_with_array):
        # GH#19301
        tz = tz_naive_fixture
        dti = pd.date_range('2016-01-01', periods=2, tz=tz)
        # FIXME: ValueError with transpose
        dtarr = tm.box_expected(dti, box_with_array, transpose=False)

        with pytest.raises(TypeError):
            dtarr < other
        with pytest.raises(TypeError):
            dtarr <= other
        with pytest.raises(TypeError):
            dtarr > other
        with pytest.raises(TypeError):
            dtarr >= other

    @pytest.mark.parametrize('dtype', [None, object])
    def test_dti_cmp_nat(self, dtype, box_with_array):
        if box_with_array is tm.to_array and dtype is object:
            # don't bother testing ndarray comparison methods as this fails
            # on older numpys (since they check object identity)
            return

        xbox = box_with_array if box_with_array is not pd.Index else np.ndarray

        left = pd.DatetimeIndex([pd.Timestamp('2011-01-01'), pd.NaT,
                                 pd.Timestamp('2011-01-03')])
        right = pd.DatetimeIndex([pd.NaT, pd.NaT, pd.Timestamp('2011-01-03')])

        left = tm.box_expected(left, box_with_array)
        right = tm.box_expected(right, box_with_array)

        lhs, rhs = left, right
        if dtype is object:
            lhs, rhs = left.astype(object), right.astype(object)

        result = rhs == lhs
        expected = np.array([False, False, True])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(result, expected)

        result = lhs != rhs
        expected = np.array([True, True, False])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(result, expected)

        expected = np.array([False, False, False])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(lhs == pd.NaT, expected)
        tm.assert_equal(pd.NaT == rhs, expected)

        expected = np.array([True, True, True])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(lhs != pd.NaT, expected)
        tm.assert_equal(pd.NaT != lhs, expected)

        expected = np.array([False, False, False])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(lhs < pd.NaT, expected)
        tm.assert_equal(pd.NaT > lhs, expected)

    def test_dti_cmp_nat_behaves_like_float_cmp_nan(self):
        fidx1 = pd.Index([1.0, np.nan, 3.0, np.nan, 5.0, 7.0])
        fidx2 = pd.Index([2.0, 3.0, np.nan, np.nan, 6.0, 7.0])

        didx1 = pd.DatetimeIndex(['2014-01-01', pd.NaT, '2014-03-01', pd.NaT,
                                  '2014-05-01', '2014-07-01'])
        didx2 = pd.DatetimeIndex(['2014-02-01', '2014-03-01', pd.NaT, pd.NaT,
                                  '2014-06-01', '2014-07-01'])
        darr = np.array([np_datetime64_compat('2014-02-01 00:00Z'),
                         np_datetime64_compat('2014-03-01 00:00Z'),
                         np_datetime64_compat('nat'), np.datetime64('nat'),
                         np_datetime64_compat('2014-06-01 00:00Z'),
                         np_datetime64_compat('2014-07-01 00:00Z')])

        cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]

        # Check pd.NaT is handled the same as np.nan
        with tm.assert_produces_warning(None):
            for idx1, idx2 in cases:
                result = idx1 < idx2
                expected = np.array([True, False, False, False, True, False])
                tm.assert_numpy_array_equal(result, expected)

                result = idx2 > idx1
                expected = np.array([True, False, False, False, True, False])
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 <= idx2
                expected = np.array([True, False, False, False, True, True])
                tm.assert_numpy_array_equal(result, expected)

                result = idx2 >= idx1
                expected = np.array([True, False, False, False, True, True])
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 == idx2
                expected = np.array([False, False, False, False, False, True])
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 != idx2
                expected = np.array([True, True, True, True, True, False])
                tm.assert_numpy_array_equal(result, expected)

        with tm.assert_produces_warning(None):
            for idx1, val in [(fidx1, np.nan), (didx1, pd.NaT)]:
                result = idx1 < val
                expected = np.array([False, False, False, False, False,
                                     False])
                tm.assert_numpy_array_equal(result, expected)
                result = idx1 > val
                tm.assert_numpy_array_equal(result, expected)
                result = idx1 <= val
                tm.assert_numpy_array_equal(result, expected)
                result = idx1 >= val
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 == val
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 != val
                expected = np.array([True, True, True, True, True, True])
                tm.assert_numpy_array_equal(result, expected)

        # Check pd.NaT is handled the same as np.nan
        with tm.assert_produces_warning(None):
            for idx1, val in [(fidx1, 3), (didx1, datetime(2014, 3, 1))]:
                result = idx1 < val
                expected = np.array([True, False, False, False, False, False])
                tm.assert_numpy_array_equal(result, expected)
                result = idx1 > val
                expected = np.array([False, False, False, False, True, True])
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 <= val
                expected = np.array([True, False, True, False, False, False])
                tm.assert_numpy_array_equal(result, expected)
                result = idx1 >= val
                expected = np.array([False, False, True, False, True, True])
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 == val
                expected = np.array([False, False, True, False, False, False])
                tm.assert_numpy_array_equal(result, expected)

                result = idx1 != val
                expected = np.array([True, True, False, True, True, True])
                tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize('op', [operator.eq, operator.ne,
                                    operator.gt, operator.ge,
                                    operator.lt, operator.le])
    def test_comparison_tzawareness_compat(self, op, box_with_array):
        # GH#18162
        dr = pd.date_range('2016-01-01', periods=6)
        dz = dr.tz_localize('US/Pacific')

        # FIXME: ValueError with transpose
        dr = tm.box_expected(dr, box_with_array, transpose=False)
        dz = tm.box_expected(dz, box_with_array, transpose=False)

        with pytest.raises(TypeError):
            op(dr, dz)

        if box_with_array is not pd.DataFrame:
            # DataFrame op is invalid until transpose bug is fixed
            with pytest.raises(TypeError):
                op(dr, list(dz))
            with pytest.raises(TypeError):
                op(dr, np.array(list(dz), dtype=object))

        with pytest.raises(TypeError):
            op(dz, dr)
        if box_with_array is not pd.DataFrame:
            # DataFrame op is invalid until transpose bug is fixed
            with pytest.raises(TypeError):
                op(dz, list(dr))
            with pytest.raises(TypeError):
                op(dz, np.array(list(dr), dtype=object))

        # Check that there isn't a problem: aware-aware and naive-naive do not
        # raise
        assert_all(dr == dr)
        assert_all(dz == dz)
        if box_with_array is not pd.DataFrame:
            # DataFrame doesn't align the lists correctly unless we transpose,
            # which we cannot do at the moment
            assert (dr == list(dr)).all()
            assert (dz == list(dz)).all()

        # Check comparisons against scalar Timestamps
        ts = pd.Timestamp('2000-03-14 01:59')
        ts_tz = pd.Timestamp('2000-03-14 01:59', tz='Europe/Amsterdam')

        assert_all(dr > ts)
        with pytest.raises(TypeError):
            op(dr, ts_tz)

        assert_all(dz > ts_tz)
        with pytest.raises(TypeError):
            op(dz, ts)

        # GH#12601: Check comparison against Timestamps and DatetimeIndex
        with pytest.raises(TypeError):
            op(ts, dz)

    @pytest.mark.parametrize('op', [operator.eq, operator.ne,
                                    operator.gt, operator.ge,
                                    operator.lt, operator.le])
    @pytest.mark.parametrize('other', [datetime(2016, 1, 1),
                                       Timestamp('2016-01-01'),
                                       np.datetime64('2016-01-01')])
    def test_scalar_comparison_tzawareness(self, op, other, tz_aware_fixture,
                                           box_with_array):
        tz = tz_aware_fixture
        dti = pd.date_range('2016-01-01', periods=2, tz=tz)

        # FIXME: ValueError with transpose
        dtarr = tm.box_expected(dti, box_with_array, transpose=False)
        with pytest.raises(TypeError):
            op(dtarr, other)
        with pytest.raises(TypeError):
            op(other, dtarr)

    @pytest.mark.parametrize('op', [operator.eq, operator.ne,
                                    operator.gt, operator.ge,
                                    operator.lt, operator.le])
    def test_nat_comparison_tzawareness(self, op):
        # GH#19276
        # tzaware DatetimeIndex should not raise when compared to NaT
        dti = pd.DatetimeIndex(['2014-01-01', pd.NaT, '2014-03-01', pd.NaT,
                                '2014-05-01', '2014-07-01'])
        expected = np.array([op == operator.ne] * len(dti))
        result = op(dti, pd.NaT)
        tm.assert_numpy_array_equal(result, expected)

        result = op(dti.tz_localize('US/Pacific'), pd.NaT)
        tm.assert_numpy_array_equal(result, expected)

    def test_dti_cmp_str(self, tz_naive_fixture):
        # GH#22074
        # regardless of tz, we expect these comparisons are valid
        tz = tz_naive_fixture
        rng = date_range('1/1/2000', periods=10, tz=tz)
        other = '1/1/2000'

        result = rng == other
        expected = np.array([True] + [False] * 9)
        tm.assert_numpy_array_equal(result, expected)

        result = rng != other
        expected = np.array([False] + [True] * 9)
        tm.assert_numpy_array_equal(result, expected)

        result = rng < other
        expected = np.array([False] * 10)
        tm.assert_numpy_array_equal(result, expected)

        result = rng <= other
        expected = np.array([True] + [False] * 9)
        tm.assert_numpy_array_equal(result, expected)

        result = rng > other
        expected = np.array([False] + [True] * 9)
        tm.assert_numpy_array_equal(result, expected)

        result = rng >= other
        expected = np.array([True] * 10)
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize('other', ['foo', 99, 4.0,
                                       object(), timedelta(days=2)])
    def test_dt64arr_cmp_scalar_invalid(self, other, tz_naive_fixture,
                                        box_with_array):
        # GH#22074
        tz = tz_naive_fixture
        xbox = box_with_array if box_with_array is not pd.Index else np.ndarray

        rng = date_range('1/1/2000', periods=10, tz=tz)
        # FIXME: ValueError with transpose
        rng = tm.box_expected(rng, box_with_array, transpose=False)

        result = rng == other
        expected = np.array([False] * 10)
        expected = tm.box_expected(expected, xbox, transpose=False)
        tm.assert_equal(result, expected)

        result = rng != other
        expected = np.array([True] * 10)
        expected = tm.box_expected(expected, xbox, transpose=False)
        tm.assert_equal(result, expected)

        with pytest.raises(TypeError):
            rng < other
        with pytest.raises(TypeError):
            rng <= other
        with pytest.raises(TypeError):
            rng > other
        with pytest.raises(TypeError):
            rng >= other

    def test_dti_cmp_list(self):
        rng = date_range('1/1/2000', periods=10)

        result = rng == list(rng)
        expected = rng == rng
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize('other', [
        pd.timedelta_range('1D', periods=10),
        pd.timedelta_range('1D', periods=10).to_series(),
        pd.timedelta_range('1D', periods=10).asi8.view('m8[ns]')
    ], ids=lambda x: type(x).__name__)
    def test_dti_cmp_tdi_tzawareness(self, other):
        # GH#22074
        # regression test that we _don't_ call _assert_tzawareness_compat
        # when comparing against TimedeltaIndex
        dti = date_range('2000-01-01', periods=10, tz='Asia/Tokyo')

        result = dti == other
        expected = np.array([False] * 10)
        tm.assert_numpy_array_equal(result, expected)

        result = dti != other
        expected = np.array([True] * 10)
        tm.assert_numpy_array_equal(result, expected)

        with pytest.raises(TypeError):
            dti < other
        with pytest.raises(TypeError):
            dti <= other
        with pytest.raises(TypeError):
            dti > other
        with pytest.raises(TypeError):
            dti >= other

    def test_dti_cmp_object_dtype(self):
        # GH#22074
        dti = date_range('2000-01-01', periods=10, tz='Asia/Tokyo')

        other = dti.astype('O')

        result = dti == other
        expected = np.array([True] * 10)
        tm.assert_numpy_array_equal(result, expected)

        other = dti.tz_localize(None)
        with pytest.raises(TypeError):
            # tzawareness failure
            dti != other

        other = np.array(list(dti[:5]) + [Timedelta(days=1)] * 5)
        result = dti == other
        expected = np.array([True] * 5 + [False] * 5)
        tm.assert_numpy_array_equal(result, expected)
        with pytest.raises(TypeError):
            dti >= other


# ------------------------------------------------------------------
# Arithmetic

class TestDatetime64Arithmetic(object):
    # This class is intended for "finished" tests that are fully parametrized
    # over DataFrame/Series/Index/DatetimeArray

    # -------------------------------------------------------------
    # Addition/Subtraction of timedelta-like

    def test_dt64arr_add_timedeltalike_scalar(self, tz_naive_fixture,
                                              two_hours, box_with_array):
        # GH#22005, GH#22163 check DataFrame doesn't raise TypeError
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01', '2000-02-01', tz=tz)
        expected = pd.date_range('2000-01-01 02:00',
                                 '2000-02-01 02:00', tz=tz)

        # FIXME: calling with transpose=True raises ValueError
        rng = tm.box_expected(rng, box_with_array, transpose=False)
        expected = tm.box_expected(expected, box_with_array, transpose=False)

        result = rng + two_hours
        tm.assert_equal(result, expected)

    def test_dt64arr_iadd_timedeltalike_scalar(self, tz_naive_fixture,
                                               two_hours, box_with_array):
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01', '2000-02-01', tz=tz)
        expected = pd.date_range('2000-01-01 02:00',
                                 '2000-02-01 02:00', tz=tz)

        # FIXME: calling with transpose=True raises ValueError
        rng = tm.box_expected(rng, box_with_array, transpose=False)
        expected = tm.box_expected(expected, box_with_array, transpose=False)

        rng += two_hours
        tm.assert_equal(rng, expected)

    def test_dt64arr_sub_timedeltalike_scalar(self, tz_naive_fixture,
                                              two_hours, box_with_array):
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01', '2000-02-01', tz=tz)
        expected = pd.date_range('1999-12-31 22:00',
                                 '2000-01-31 22:00', tz=tz)

        # FIXME: calling with transpose=True raises ValueError
        rng = tm.box_expected(rng, box_with_array, transpose=False)
        expected = tm.box_expected(expected, box_with_array, transpose=False)

        result = rng - two_hours
        tm.assert_equal(result, expected)

    def test_dt64arr_isub_timedeltalike_scalar(self, tz_naive_fixture,
                                               two_hours, box_with_array):
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01', '2000-02-01', tz=tz)
        expected = pd.date_range('1999-12-31 22:00',
                                 '2000-01-31 22:00', tz=tz)

        # FIXME: calling with transpose=True raises ValueError
        rng = tm.box_expected(rng, box_with_array, transpose=False)
        expected = tm.box_expected(expected, box_with_array, transpose=False)

        rng -= two_hours
        tm.assert_equal(rng, expected)

    def test_dt64arr_add_td64_scalar(self, box_with_array):
        # scalar timedeltas/np.timedelta64 objects
        # operate with np.timedelta64 correctly
        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
        expected = Series([Timestamp('20130101 9:01:01'),
                           Timestamp('20130101 9:02:01')])

        dtarr = tm.box_expected(ser, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = dtarr + np.timedelta64(1, 's')
        tm.assert_equal(result, expected)
        result = np.timedelta64(1, 's') + dtarr
        tm.assert_equal(result, expected)

        expected = Series([Timestamp('20130101 9:01:00.005'),
                           Timestamp('20130101 9:02:00.005')])
        expected = tm.box_expected(expected, box_with_array)

        result = dtarr + np.timedelta64(5, 'ms')
        tm.assert_equal(result, expected)
        result = np.timedelta64(5, 'ms') + dtarr
        tm.assert_equal(result, expected)

    def test_dt64arr_add_sub_td64_nat(self, box_with_array, tz_naive_fixture):
        # GH#23320 special handling for timedelta64("NaT")
        tz = tz_naive_fixture
        dti = pd.date_range("1994-04-01", periods=9, tz=tz, freq="QS")
        other = np.timedelta64("NaT")
        expected = pd.DatetimeIndex(["NaT"] * 9, tz=tz)

        # FIXME: fails with transpose=True due to tz-aware DataFrame
        # transpose bug
        obj = tm.box_expected(dti, box_with_array, transpose=False)
        expected = tm.box_expected(expected, box_with_array, transpose=False)

        result = obj + other
        tm.assert_equal(result, expected)
        result = other + obj
        tm.assert_equal(result, expected)
        result = obj - other
        tm.assert_equal(result, expected)
        with pytest.raises(TypeError):
            other - obj

    def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture,
                                         box_with_array):
        if box_with_array is pd.DataFrame:
            pytest.xfail("FIXME: ValueError with transpose; "
                         "alignment error without")

        tz = tz_naive_fixture
        dti = pd.date_range('2016-01-01', periods=3, tz=tz)
        tdi = pd.TimedeltaIndex(['-1 Day', '-1 Day', '-1 Day'])
        tdarr = tdi.values

        expected = pd.date_range('2015-12-31', periods=3, tz=tz)

        dtarr = tm.box_expected(dti, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = dtarr + tdarr
        tm.assert_equal(result, expected)
        result = tdarr + dtarr
        tm.assert_equal(result, expected)

        expected = pd.date_range('2016-01-02', periods=3, tz=tz)
        expected = tm.box_expected(expected, box_with_array)
        result = dtarr - tdarr
        tm.assert_equal(result, expected)

        with pytest.raises(TypeError):
            tdarr - dtarr

    # -----------------------------------------------------------------
    # Subtraction of datetime-like scalars

    @pytest.mark.parametrize('ts', [
        pd.Timestamp('2013-01-01'),
        pd.Timestamp('2013-01-01').to_pydatetime(),
        pd.Timestamp('2013-01-01').to_datetime64()])
    def test_dt64arr_sub_dtscalar(self, box_with_array, ts):
        # GH#8554, GH#22163 DataFrame op should _not_ return dt64 dtype
        idx = pd.date_range('2013-01-01', periods=3)
        idx = tm.box_expected(idx, box_with_array)

        expected = pd.TimedeltaIndex(['0 Days', '1 Day', '2 Days'])
        expected = tm.box_expected(expected, box_with_array)

        result = idx - ts
        tm.assert_equal(result, expected)

    def test_dt64arr_sub_datetime64_not_ns(self, box_with_array):
        # GH#7996, GH#22163 ensure non-nano datetime64 is converted to nano
        # for DataFrame operation
        dt64 = np.datetime64('2013-01-01')
        assert dt64.dtype == 'datetime64[D]'

        dti = pd.date_range('20130101', periods=3)
        dtarr = tm.box_expected(dti, box_with_array)

        expected = pd.TimedeltaIndex(['0 Days', '1 Day', '2 Days'])
        expected = tm.box_expected(expected, box_with_array)

        result = dtarr - dt64
        tm.assert_equal(result, expected)

        result = dt64 - dtarr
        tm.assert_equal(result, -expected)

    def test_dt64arr_sub_timestamp(self, box_with_array):
        ser = pd.date_range('2014-03-17', periods=2, freq='D',
                            tz='US/Eastern')
        ts = ser[0]

        # FIXME: transpose raises ValueError
        ser = tm.box_expected(ser, box_with_array, transpose=False)

        delta_series = pd.Series([np.timedelta64(0, 'D'),
                                  np.timedelta64(1, 'D')])
        expected = tm.box_expected(delta_series, box_with_array,
                                   transpose=False)

        tm.assert_equal(ser - ts, expected)
        tm.assert_equal(ts - ser, -expected)

    def test_dt64arr_sub_NaT(self, box_with_array):
        # GH#18808
        dti = pd.DatetimeIndex([pd.NaT, pd.Timestamp('19900315')])
        ser = tm.box_expected(dti, box_with_array, transpose=False)

        result = ser - pd.NaT
        expected = pd.Series([pd.NaT, pd.NaT], dtype='timedelta64[ns]')
        # FIXME: raises ValueError with transpose
        expected = tm.box_expected(expected, box_with_array, transpose=False)
        tm.assert_equal(result, expected)

        dti_tz = dti.tz_localize('Asia/Tokyo')
        ser_tz = tm.box_expected(dti_tz, box_with_array, transpose=False)

        result = ser_tz - pd.NaT
        expected = pd.Series([pd.NaT, pd.NaT], dtype='timedelta64[ns]')
        expected = tm.box_expected(expected, box_with_array, transpose=False)
        tm.assert_equal(result, expected)

    # -------------------------------------------------------------
    # Subtraction of datetime-like array-like

    def test_dt64arr_naive_sub_dt64ndarray(self, box_with_array):
        dti = pd.date_range('2016-01-01', periods=3, tz=None)
        dt64vals = dti.values

        dtarr = tm.box_expected(dti, box_with_array)

        expected = dtarr - dtarr
        result = dtarr - dt64vals
        tm.assert_equal(result, expected)
        result = dt64vals - dtarr
        tm.assert_equal(result, expected)

    def test_dt64arr_aware_sub_dt64ndarray_raises(self, tz_aware_fixture,
                                                  box_with_array):
        if box_with_array is pd.DataFrame:
            pytest.xfail("FIXME: ValueError with transpose; "
                         "alignment error without")

        tz = tz_aware_fixture
        dti = pd.date_range('2016-01-01', periods=3, tz=tz)
        dt64vals = dti.values

        dtarr = tm.box_expected(dti, box_with_array)

        with pytest.raises(TypeError):
            dtarr - dt64vals
        with pytest.raises(TypeError):
            dt64vals - dtarr

    # -------------------------------------------------------------
    # Addition of datetime-like others (invalid)

    def test_dt64arr_add_dt64ndarray_raises(self, tz_naive_fixture,
                                            box_with_array):
        if box_with_array is pd.DataFrame:
            pytest.xfail("FIXME: ValueError with transpose; "
                         "alignment error without")

        tz = tz_naive_fixture
        dti = pd.date_range('2016-01-01', periods=3, tz=tz)
        dt64vals = dti.values

        dtarr = tm.box_expected(dti, box_with_array)

        with pytest.raises(TypeError):
            dtarr + dt64vals
        with pytest.raises(TypeError):
            dt64vals + dtarr

    def test_dt64arr_add_timestamp_raises(self, box_with_array):
        # GH#22163 ensure DataFrame doesn't cast Timestamp to i8
        idx = DatetimeIndex(['2011-01-01', '2011-01-02'])
        idx = tm.box_expected(idx, box_with_array)
        msg = "cannot add"
        with pytest.raises(TypeError, match=msg):
            idx + Timestamp('2011-01-01')
        with pytest.raises(TypeError, match=msg):
            Timestamp('2011-01-01') + idx

    # -------------------------------------------------------------
    # Other Invalid Addition/Subtraction

    @pytest.mark.parametrize('other', [3.14, np.array([2.0, 3.0])])
    def test_dt64arr_add_sub_float(self, other, box_with_array):
        dti = DatetimeIndex(['2011-01-01', '2011-01-02'], freq='D')
        dtarr = tm.box_expected(dti, box_with_array)
        with pytest.raises(TypeError):
            dtarr + other
        with pytest.raises(TypeError):
            other + dtarr
        with pytest.raises(TypeError):
            dtarr - other
        with pytest.raises(TypeError):
            other - dtarr

    @pytest.mark.parametrize('pi_freq', ['D', 'W', 'Q', 'H'])
    @pytest.mark.parametrize('dti_freq', [None, 'D'])
    def test_dt64arr_add_sub_parr(self, dti_freq, pi_freq,
                                  box_with_array, box_with_array2):
        # GH#20049 subtracting PeriodIndex should raise TypeError
        dti = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], freq=dti_freq)
        pi = dti.to_period(pi_freq)

        dtarr = tm.box_expected(dti, box_with_array)
        parr = tm.box_expected(pi, box_with_array2)

        with pytest.raises(TypeError):
            dtarr + parr
        with pytest.raises(TypeError):
            parr + dtarr
        with pytest.raises(TypeError):
            dtarr - parr
        with pytest.raises(TypeError):
            parr - dtarr

    @pytest.mark.parametrize('dti_freq', [None, 'D'])
    def test_dt64arr_add_sub_period_scalar(self, dti_freq, box_with_array):
        # GH#13078
        # not supported, check TypeError
        per = pd.Period('2011-01-01', freq='D')

        idx = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], freq=dti_freq)
        dtarr = tm.box_expected(idx, box_with_array)

        with pytest.raises(TypeError):
            dtarr + per
        with pytest.raises(TypeError):
            per + dtarr
        with pytest.raises(TypeError):
            dtarr - per
        with pytest.raises(TypeError):
            per - dtarr


class TestDatetime64DateOffsetArithmetic(object):

    # -------------------------------------------------------------
    # Tick DateOffsets

    # TODO: parametrize over timezone?
    def test_dt64arr_series_add_tick_DateOffset(self, box_with_array):
        # GH#4532
        # operate with pd.offsets
        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
        expected = Series([Timestamp('20130101 9:01:05'),
                           Timestamp('20130101 9:02:05')])

        ser = tm.box_expected(ser, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = ser + pd.offsets.Second(5)
        tm.assert_equal(result, expected)

        result2 = pd.offsets.Second(5) + ser
        tm.assert_equal(result2, expected)

    def test_dt64arr_series_sub_tick_DateOffset(self, box_with_array):
        # GH#4532
        # operate with pd.offsets
        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
        expected = Series([Timestamp('20130101 9:00:55'),
                           Timestamp('20130101 9:01:55')])

        ser = tm.box_expected(ser, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = ser - pd.offsets.Second(5)
        tm.assert_equal(result, expected)

        result2 = -pd.offsets.Second(5) + ser
        tm.assert_equal(result2, expected)

        with pytest.raises(TypeError):
            pd.offsets.Second(5) - ser

    @pytest.mark.parametrize('cls_name', ['Day', 'Hour', 'Minute', 'Second',
                                          'Milli', 'Micro', 'Nano'])
    def test_dt64arr_add_sub_tick_DateOffset_smoke(self, cls_name,
                                                   box_with_array):
        # GH#4532
        # smoke tests for valid DateOffsets
        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
        ser = tm.box_expected(ser, box_with_array)

        offset_cls = getattr(pd.offsets, cls_name)
        ser + offset_cls(5)
        offset_cls(5) + ser
        ser - offset_cls(5)

    def test_dti_add_tick_tzaware(self, tz_aware_fixture, box_with_array):
        # GH#21610, GH#22163 ensure DataFrame doesn't return object-dtype
        tz = tz_aware_fixture
        if tz == 'US/Pacific':
            dates = date_range('2012-11-01', periods=3, tz=tz)
            offset = dates + pd.offsets.Hour(5)
            assert dates[0] + pd.offsets.Hour(5) == offset[0]

        dates = date_range('2010-11-01 00:00',
                           periods=3, tz=tz, freq='H')
        expected = DatetimeIndex(['2010-11-01 05:00', '2010-11-01 06:00',
                                  '2010-11-01 07:00'], freq='H', tz=tz)

        # FIXME: these raise ValueError with transpose=True
        dates = tm.box_expected(dates, box_with_array, transpose=False)
        expected = tm.box_expected(expected, box_with_array, transpose=False)

        # TODO: parametrize over the scalar being added?  radd?  sub?
        offset = dates + pd.offsets.Hour(5)
        tm.assert_equal(offset, expected)
        offset = dates + np.timedelta64(5, 'h')
        tm.assert_equal(offset, expected)
        offset = dates + timedelta(hours=5)
        tm.assert_equal(offset, expected)

    # -------------------------------------------------------------
    # RelativeDelta DateOffsets

    def test_dt64arr_add_sub_relativedelta_offsets(self, box_with_array):
        # GH#10699
        vec = DatetimeIndex([Timestamp('2000-01-05 00:15:00'),
                             Timestamp('2000-01-31 00:23:00'),
                             Timestamp('2000-01-01'),
                             Timestamp('2000-03-31'),
                             Timestamp('2000-02-29'),
                             Timestamp('2000-12-31'),
                             Timestamp('2000-05-15'),
                             Timestamp('2001-06-15')])
        vec = tm.box_expected(vec, box_with_array)
        vec_items = vec.squeeze() if box_with_array is pd.DataFrame else vec

        # DateOffset relativedelta fastpath
        relative_kwargs = [('years', 2), ('months', 5), ('days', 3),
                           ('hours', 5), ('minutes', 10), ('seconds', 2),
                           ('microseconds', 5)]
        for i, kwd in enumerate(relative_kwargs):
            off = pd.DateOffset(**dict([kwd]))

            expected = DatetimeIndex([x + off for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec + off)

            expected = DatetimeIndex([x - off for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec - off)

            off = pd.DateOffset(**dict(relative_kwargs[:i + 1]))

            expected = DatetimeIndex([x + off for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec + off)

            expected = DatetimeIndex([x - off for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec - off)

            with pytest.raises(TypeError):
                off - vec

    # -------------------------------------------------------------
    # Non-Tick, Non-RelativeDelta DateOffsets

    # TODO: redundant with test_dt64arr_add_sub_DateOffset? that includes
    # tz-aware cases which this does not
    @pytest.mark.parametrize('cls_and_kwargs', [
        'YearBegin', ('YearBegin', {'month': 5}),
        'YearEnd', ('YearEnd', {'month': 5}),
        'MonthBegin', 'MonthEnd',
        'SemiMonthEnd', 'SemiMonthBegin',
        'Week', ('Week', {'weekday': 3}),
        'Week', ('Week', {'weekday': 6}),
        'BusinessDay', 'BDay', 'QuarterEnd', 'QuarterBegin',
        'CustomBusinessDay', 'CDay', 'CBMonthEnd',
        'CBMonthBegin', 'BMonthBegin', 'BMonthEnd',
        'BusinessHour', 'BYearBegin', 'BYearEnd',
        'BQuarterBegin', ('LastWeekOfMonth', {'weekday': 2}),
        ('FY5253Quarter', {'qtr_with_extra_week': 1,
                           'startingMonth': 1,
                           'weekday': 2,
                           'variation': 'nearest'}),
        ('FY5253', {'weekday': 0, 'startingMonth': 2, 'variation': 'nearest'}),
        ('WeekOfMonth', {'weekday': 2, 'week': 2}),
        'Easter', ('DateOffset', {'day': 4}),
        ('DateOffset', {'month': 5})])
    @pytest.mark.parametrize('normalize', [True, False])
    @pytest.mark.parametrize('n', [0, 5])
    def test_dt64arr_add_sub_DateOffsets(self, box_with_array,
                                         n, normalize, cls_and_kwargs):
        # GH#10699
        # assert vectorized operation matches pointwise operations
        if isinstance(cls_and_kwargs, tuple):
            # If cls_name param is a tuple, then 2nd entry is kwargs for
            # the offset constructor
            cls_name, kwargs = cls_and_kwargs
        else:
            cls_name = cls_and_kwargs
            kwargs = {}

        if n == 0 and cls_name in ['WeekOfMonth', 'LastWeekOfMonth',
                                   'FY5253Quarter', 'FY5253']:
            # passing n = 0 is invalid for these offset classes
            return

        vec = DatetimeIndex([Timestamp('2000-01-05 00:15:00'),
                             Timestamp('2000-01-31 00:23:00'),
                             Timestamp('2000-01-01'),
                             Timestamp('2000-03-31'),
                             Timestamp('2000-02-29'),
                             Timestamp('2000-12-31'),
                             Timestamp('2000-05-15'),
                             Timestamp('2001-06-15')])
        vec = tm.box_expected(vec, box_with_array)
        vec_items = vec.squeeze() if box_with_array is pd.DataFrame else vec

        offset_cls = getattr(pd.offsets, cls_name)

        with warnings.catch_warnings(record=True):
            # pandas.errors.PerformanceWarning: Non-vectorized DateOffset
            # being applied to Series or DatetimeIndex
            # we aren't testing that here, so ignore.
            warnings.simplefilter("ignore", PerformanceWarning)

            offset = offset_cls(n, normalize=normalize, **kwargs)

            expected = DatetimeIndex([x + offset for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec + offset)

            expected = DatetimeIndex([x - offset for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec - offset)

            expected = DatetimeIndex([offset + x for x in vec_items])
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, offset + vec)

            with pytest.raises(TypeError):
                offset - vec

    def test_dt64arr_add_sub_DateOffset(self, box_with_array):
        # GH#10699
        s = date_range('2000-01-01', '2000-01-31', name='a')
        s = tm.box_expected(s, box_with_array)
        result = s + pd.DateOffset(years=1)
        result2 = pd.DateOffset(years=1) + s
        exp = date_range('2001-01-01', '2001-01-31', name='a')
        exp = tm.box_expected(exp, box_with_array)
        tm.assert_equal(result, exp)
        tm.assert_equal(result2, exp)

        result = s - pd.DateOffset(years=1)
        exp = date_range('1999-01-01', '1999-01-31', name='a')
        exp = tm.box_expected(exp, box_with_array)
        tm.assert_equal(result, exp)

        s = DatetimeIndex([Timestamp('2000-01-15 00:15:00', tz='US/Central'),
                           Timestamp('2000-02-15', tz='US/Central')],
                          name='a')
        # FIXME: ValueError with tzaware DataFrame transpose
        s = tm.box_expected(s, box_with_array, transpose=False)
        result = s + pd.offsets.Day()
        result2 = pd.offsets.Day() + s
        exp = DatetimeIndex([Timestamp('2000-01-16 00:15:00',
                                       tz='US/Central'),
                             Timestamp('2000-02-16', tz='US/Central')],
                            name='a')
        exp = tm.box_expected(exp, box_with_array, transpose=False)
        tm.assert_equal(result, exp)
        tm.assert_equal(result2, exp)

        s = DatetimeIndex([Timestamp('2000-01-15 00:15:00', tz='US/Central'),
                           Timestamp('2000-02-15', tz='US/Central')],
                          name='a')
        s = tm.box_expected(s, box_with_array, transpose=False)
        result = s + pd.offsets.MonthEnd()
        result2 = pd.offsets.MonthEnd() + s
        exp = DatetimeIndex([Timestamp('2000-01-31 00:15:00',
                                       tz='US/Central'),
                             Timestamp('2000-02-29', tz='US/Central')],
                            name='a')
        exp = tm.box_expected(exp, box_with_array, transpose=False)
        tm.assert_equal(result, exp)
        tm.assert_equal(result2, exp)

    # TODO: __sub__, __rsub__

    def test_dt64arr_add_mixed_offset_array(self, box_with_array):
        # GH#10699
        # array of offsets
        s = DatetimeIndex([Timestamp('2000-1-1'), Timestamp('2000-2-1')])
        s = tm.box_expected(s, box_with_array)

        warn = None if box_with_array is pd.DataFrame else PerformanceWarning
        with tm.assert_produces_warning(warn,
                                        clear=[pd.core.arrays.datetimelike]):
            other = pd.Index([pd.offsets.DateOffset(years=1),
                              pd.offsets.MonthEnd()])
            other = tm.box_expected(other, box_with_array)
            result = s + other
            exp = DatetimeIndex([Timestamp('2001-1-1'),
                                 Timestamp('2000-2-29')])
            exp = tm.box_expected(exp, box_with_array)
            tm.assert_equal(result, exp)

            # same offset
            other = pd.Index([pd.offsets.DateOffset(years=1),
                              pd.offsets.DateOffset(years=1)])
            other = tm.box_expected(other, box_with_array)
            result = s + other
            exp = DatetimeIndex([Timestamp('2001-1-1'),
                                 Timestamp('2001-2-1')])
            exp = tm.box_expected(exp, box_with_array)
            tm.assert_equal(result, exp)
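
    # For reference, an object-dtype array of offsets is applied elementwise:
    # above, Timestamp('2000-01-01') + DateOffset(years=1) gives '2001-01-01'
    # while Timestamp('2000-02-01') + MonthEnd() gives '2000-02-29', matching
    # the expected values in the test.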

    # TODO: overlap with test_dt64arr_add_mixed_offset_array?
    def test_dt64arr_add_sub_offset_ndarray(self, tz_naive_fixture,
                                            box_with_array):
        # GH#18849
        if box_with_array is pd.DataFrame:
            pytest.xfail("FIXME: ValueError with transpose; "
                         "alignment error without")

        tz = tz_naive_fixture
        dti = pd.date_range('2017-01-01', periods=2, tz=tz)
        dtarr = tm.box_expected(dti, box_with_array)

        other = np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])

        warn = None if box_with_array is pd.DataFrame else PerformanceWarning
        with tm.assert_produces_warning(warn,
                                        clear=[pd.core.arrays.datetimelike]):
            res = dtarr + other
        expected = DatetimeIndex([dti[n] + other[n] for n in range(len(dti))],
                                 name=dti.name, freq='infer')
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(res, expected)

        with tm.assert_produces_warning(warn,
                                        clear=[pd.core.arrays.datetimelike]):
            res2 = other + dtarr
        tm.assert_equal(res2, expected)

        with tm.assert_produces_warning(warn,
                                        clear=[pd.core.arrays.datetimelike]):
            res = dtarr - other
        expected = DatetimeIndex([dti[n] - other[n] for n in range(len(dti))],
                                 name=dti.name, freq='infer')
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(res, expected)
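
    # The PerformanceWarning asserted above is expected because an object-dtype
    # array of offsets cannot be applied in a vectorized way; pandas falls back
    # to applying each offset pointwise (see the warning text ignored in
    # test_dt64arr_add_sub_DateOffsets).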


class TestDatetime64OverflowHandling(object):
    # TODO: box + de-duplicate

    def test_dt64_overflow_masking(self, box_with_array):
        # GH#25317
        left = Series([Timestamp('1969-12-31')])
        right = Series([NaT])

        left = tm.box_expected(left, box_with_array)
        right = tm.box_expected(right, box_with_array)

        expected = TimedeltaIndex([NaT])
        expected = tm.box_expected(expected, box_with_array)

        result = left - right
        tm.assert_equal(result, expected)

    def test_dt64_series_arith_overflow(self):
        # GH#12534, fixed by GH#19024
        dt = pd.Timestamp('1700-01-31')
        td = pd.Timedelta('20000 Days')
        dti = pd.date_range('1949-09-30', freq='100Y', periods=4)
        ser = pd.Series(dti)
        with pytest.raises(OverflowError):
            ser - dt
        with pytest.raises(OverflowError):
            dt - ser
        with pytest.raises(OverflowError):
            ser + td
        with pytest.raises(OverflowError):
            td + ser

        ser.iloc[-1] = pd.NaT
        expected = pd.Series(['2004-10-03', '2104-10-04', '2204-10-04', 'NaT'],
                             dtype='datetime64[ns]')
        res = ser + td
        tm.assert_series_equal(res, expected)
        res = td + ser
        tm.assert_series_equal(res, expected)

        ser.iloc[1:] = pd.NaT
        expected = pd.Series(['91279 Days', 'NaT', 'NaT', 'NaT'],
                             dtype='timedelta64[ns]')
        res = ser - dt
        tm.assert_series_equal(res, expected)
        res = dt - ser
        tm.assert_series_equal(res, -expected)
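
    # For context: both timestamps and timedeltas are stored as int64
    # nanoseconds, so datetimes are bounded to roughly 1677-2262 and
    # timedeltas to roughly +/- 292 years; operations whose result would fall
    # outside those ranges raise OverflowError unless the offending entries
    # are first masked to NaT, as above.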

    def test_datetimeindex_sub_timestamp_overflow(self):
        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
        dtimin = pd.to_datetime(['now', pd.Timestamp.min])

        tsneg = Timestamp('1950-01-01')
        ts_neg_variants = [tsneg,
                           tsneg.to_pydatetime(),
                           tsneg.to_datetime64().astype('datetime64[ns]'),
                           tsneg.to_datetime64().astype('datetime64[D]')]

        tspos = Timestamp('1980-01-01')
        ts_pos_variants = [tspos,
                           tspos.to_pydatetime(),
                           tspos.to_datetime64().astype('datetime64[ns]'),
                           tspos.to_datetime64().astype('datetime64[D]')]

        for variant in ts_neg_variants:
            with pytest.raises(OverflowError):
                dtimax - variant

        expected = pd.Timestamp.max.value - tspos.value
        for variant in ts_pos_variants:
            res = dtimax - variant
            assert res[1].value == expected

        expected = pd.Timestamp.min.value - tsneg.value
        for variant in ts_neg_variants:
            res = dtimin - variant
            assert res[1].value == expected

        for variant in ts_pos_variants:
            with pytest.raises(OverflowError):
                dtimin - variant
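
    # For reference, pd.Timestamp.min is 1677-09-21 and pd.Timestamp.max is
    # 2262-04-11 at nanosecond resolution, so Timestamp.max minus a 1950 date
    # overflows the int64 timedelta range while Timestamp.max minus a 1980
    # date does not.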

    def test_datetimeindex_sub_datetimeindex_overflow(self):
        # GH#22492, GH#22508
        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
        dtimin = pd.to_datetime(['now', pd.Timestamp.min])

        ts_neg = pd.to_datetime(['1950-01-01', '1950-01-01'])
        ts_pos = pd.to_datetime(['1980-01-01', '1980-01-01'])

        # General tests
        expected = pd.Timestamp.max.value - ts_pos[1].value
        result = dtimax - ts_pos
        assert result[1].value == expected

        expected = pd.Timestamp.min.value - ts_neg[1].value
        result = dtimin - ts_neg
        assert result[1].value == expected

        with pytest.raises(OverflowError):
            dtimax - ts_neg
        with pytest.raises(OverflowError):
            dtimin - ts_pos

        # Edge cases
        tmin = pd.to_datetime([pd.Timestamp.min])
        t1 = tmin + pd.Timedelta.max + pd.Timedelta('1us')
        with pytest.raises(OverflowError):
            t1 - tmin

        tmax = pd.to_datetime([pd.Timestamp.max])
        t2 = tmax + pd.Timedelta.min - pd.Timedelta('1us')
        with pytest.raises(OverflowError):
            tmax - t2


class TestTimestampSeriesArithmetic(object):

    def test_empty_series_add_sub(self):
        # GH#13844
        a = Series(dtype='M8[ns]')
        b = Series(dtype='m8[ns]')
        tm.assert_series_equal(a, a + b)
        tm.assert_series_equal(a, a - b)
        tm.assert_series_equal(a, b + a)
        with pytest.raises(TypeError):
            b - a

    def test_operators_datetimelike(self):

        # ## timedelta64 ###
        td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
        td1.iloc[2] = np.nan

        # ## datetime64 ###
        dt1 = Series([pd.Timestamp('20111230'), pd.Timestamp('20120101'),
                      pd.Timestamp('20120103')])
        dt1.iloc[2] = np.nan
        dt2 = Series([pd.Timestamp('20111231'), pd.Timestamp('20120102'),
                      pd.Timestamp('20120104')])
        dt1 - dt2
        dt2 - dt1

        # ## datetime64 with timedelta ###
        dt1 + td1
        td1 + dt1
        dt1 - td1
        # TODO: Decide if this ought to work.
        # td1 - dt1

        # ## timedelta with datetime64 ###
        td1 + dt1
        dt1 + td1

    def test_dt64ser_sub_datetime_dtype(self):
        ts = Timestamp(datetime(1993, 1, 7, 13, 30, 00))
        dt = datetime(1993, 6, 22, 13, 30)
        ser = Series([ts])
        result = pd.to_timedelta(np.abs(ser - dt))
        assert result.dtype == 'timedelta64[ns]'

    # -------------------------------------------------------------
    # TODO: This next block of tests came from tests.series.test_operators,
    # needs to be de-duplicated and parametrized over `box` classes

    def test_operators_datetimelike_invalid(self, all_arithmetic_operators):
        # these are all TypeError ops
        op_str = all_arithmetic_operators

        def check(get_ser, test_ser):
            # check that we are getting a TypeError
            # with 'operate' (from core/ops.py) for the ops that are not
            # defined
            op = getattr(get_ser, op_str, None)
            # Previously, _validate_for_numeric_binop in core/indexes/base.py
            # did this for us.
            with pytest.raises(TypeError,
                               match='operate|[cC]annot|unsupported operand'):
                op(test_ser)

        # ## timedelta64 ###
        td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
        td1.iloc[2] = np.nan

        # ## datetime64 ###
        dt1 = Series([Timestamp('20111230'), Timestamp('20120101'),
                      Timestamp('20120103')])
        dt1.iloc[2] = np.nan
        dt2 = Series([Timestamp('20111231'), Timestamp('20120102'),
                      Timestamp('20120104')])
        if op_str not in ['__sub__', '__rsub__']:
            check(dt1, dt2)

        # ## datetime64 with timedelta ###
        # TODO(jreback) __rsub__ should raise?
        if op_str not in ['__add__', '__radd__', '__sub__']:
            check(dt1, td1)

        # 8260, 10763
        # datetime64 with tz
        tz = 'US/Eastern'
        dt1 = Series(date_range('2000-01-01 09:00:00', periods=5,
                                tz=tz), name='foo')
        dt2 = dt1.copy()
        dt2.iloc[2] = np.nan
        td1 = Series(pd.timedelta_range('1 days 1 min', periods=5, freq='H'))
        td2 = td1.copy()
        td2.iloc[1] = np.nan

        if op_str not in ['__add__', '__radd__', '__sub__', '__rsub__']:
            check(dt2, td2)

    def test_sub_single_tz(self):
        # GH#12290
        s1 = Series([pd.Timestamp('2016-02-10', tz='America/Sao_Paulo')])
        s2 = Series([pd.Timestamp('2016-02-08', tz='America/Sao_Paulo')])
        result = s1 - s2
        expected = Series([Timedelta('2days')])
        tm.assert_series_equal(result, expected)
        result = s2 - s1
        expected = Series([Timedelta('-2days')])
        tm.assert_series_equal(result, expected)

    def test_dt64tz_series_sub_dtitz(self):
        # GH#19071 subtracting tzaware DatetimeIndex from tzaware Series
        # (with same tz) raises, fixed by #19024
        dti = pd.date_range('1999-09-30', periods=10, tz='US/Pacific')
        ser = pd.Series(dti)
        expected = pd.Series(pd.TimedeltaIndex(['0days'] * 10))

        res = dti - ser
        tm.assert_series_equal(res, expected)
        res = ser - dti
        tm.assert_series_equal(res, expected)

    def test_sub_datetime_compat(self):
        # see GH#14088
        s = Series([datetime(2016, 8, 23, 12, tzinfo=pytz.utc), pd.NaT])
        dt = datetime(2016, 8, 22, 12, tzinfo=pytz.utc)
        exp = Series([Timedelta('1 days'), pd.NaT])
        tm.assert_series_equal(s - dt, exp)
        tm.assert_series_equal(s - Timestamp(dt), exp)

    def test_dt64_series_add_mixed_tick_DateOffset(self):
        # GH#4532
        # operate with pd.offsets
        s = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])

        result = s + pd.offsets.Milli(5)
        result2 = pd.offsets.Milli(5) + s
        expected = Series([Timestamp('20130101 9:01:00.005'),
                           Timestamp('20130101 9:02:00.005')])
        tm.assert_series_equal(result, expected)
        tm.assert_series_equal(result2, expected)

        result = s + pd.offsets.Minute(5) + pd.offsets.Milli(5)
        expected = Series([Timestamp('20130101 9:06:00.005'),
                           Timestamp('20130101 9:07:00.005')])
        tm.assert_series_equal(result, expected)

    def test_datetime64_ops_nat(self):
        # GH#11349
        datetime_series = Series([NaT, Timestamp('19900315')])
        nat_series_dtype_timestamp = Series([NaT, NaT], dtype='datetime64[ns]')
        single_nat_dtype_datetime = Series([NaT], dtype='datetime64[ns]')

        # subtraction
        tm.assert_series_equal(-NaT + datetime_series,
                               nat_series_dtype_timestamp)
        with pytest.raises(TypeError):
            -single_nat_dtype_datetime + datetime_series

        tm.assert_series_equal(-NaT + nat_series_dtype_timestamp,
                               nat_series_dtype_timestamp)
        with pytest.raises(TypeError):
            -single_nat_dtype_datetime + nat_series_dtype_timestamp

        # addition
        tm.assert_series_equal(nat_series_dtype_timestamp + NaT,
                               nat_series_dtype_timestamp)
        tm.assert_series_equal(NaT + nat_series_dtype_timestamp,
                               nat_series_dtype_timestamp)

        tm.assert_series_equal(nat_series_dtype_timestamp + NaT,
                               nat_series_dtype_timestamp)
        tm.assert_series_equal(NaT + nat_series_dtype_timestamp,
                               nat_series_dtype_timestamp)
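
    # For reference, the NaT scalar propagates: adding or subtracting it
    # leaves every entry NaT while preserving the datetime64[ns] dtype of the
    # Series operand, whereas negating a datetime64-dtype Series (as attempted
    # above) is not a defined operation and raises TypeError.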

    # -------------------------------------------------------------
    # Invalid Operations
    # TODO: this block also needs to be de-duplicated and parametrized

    @pytest.mark.parametrize('dt64_series', [
        Series([Timestamp('19900315'), Timestamp('19900315')]),
        Series([pd.NaT, Timestamp('19900315')]),
        Series([pd.NaT, pd.NaT], dtype='datetime64[ns]')])
    @pytest.mark.parametrize('one', [1, 1.0, np.array(1)])
    def test_dt64_mul_div_numeric_invalid(self, one, dt64_series):
        # multiplication
        with pytest.raises(TypeError):
            dt64_series * one
        with pytest.raises(TypeError):
            one * dt64_series

        # division
        with pytest.raises(TypeError):
            dt64_series / one
        with pytest.raises(TypeError):
            one / dt64_series

    @pytest.mark.parametrize('op', ['__add__', '__radd__',
                                    '__sub__', '__rsub__'])
    @pytest.mark.parametrize('tz', [None, 'Asia/Tokyo'])
    def test_dt64_series_add_intlike(self, tz, op):
        # GH#19123
        dti = pd.DatetimeIndex(['2016-01-02', '2016-02-03', 'NaT'], tz=tz)
        ser = Series(dti)

        other = Series([20, 30, 40], dtype='uint8')

        method = getattr(ser, op)
        with pytest.raises(TypeError):
            method(1)
        with pytest.raises(TypeError):
            method(other)
        with pytest.raises(TypeError):
            method(other.values)
        with pytest.raises(TypeError):
            method(pd.Index(other))

    # -------------------------------------------------------------
    # Timezone-Centric Tests

    def test_operators_datetimelike_with_timezones(self):
        tz = 'US/Eastern'
        dt1 = Series(date_range('2000-01-01 09:00:00', periods=5,
                                tz=tz), name='foo')
        dt2 = dt1.copy()
        dt2.iloc[2] = np.nan

        td1 = Series(pd.timedelta_range('1 days 1 min', periods=5, freq='H'))
        td2 = td1.copy()
        td2.iloc[1] = np.nan

        result = dt1 + td1[0]
        exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 + td2[0]
        exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        # odd numpy behavior with scalar timedeltas
        result = td1[0] + dt1
        exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = td2[0] + dt2
        exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt1 - td1[0]
        exp = (dt1.dt.tz_localize(None) - td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
        with pytest.raises(TypeError):
            td1[0] - dt1

        result = dt2 - td2[0]
        exp = (dt2.dt.tz_localize(None) - td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
        with pytest.raises(TypeError):
            td2[0] - dt2

        result = dt1 + td1
        exp = (dt1.dt.tz_localize(None) + td1).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 + td2
        exp = (dt2.dt.tz_localize(None) + td2).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt1 - td1
        exp = (dt1.dt.tz_localize(None) - td1).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 - td2
        exp = (dt2.dt.tz_localize(None) - td2).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        with pytest.raises(TypeError):
            td1 - dt1
        with pytest.raises(TypeError):
            td2 - dt2


class TestDatetimeIndexArithmetic(object):

    # -------------------------------------------------------------
    # Binary operations DatetimeIndex and int

    def test_dti_add_int(self, tz_naive_fixture, one):
        # Variants of `one` for #19012
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01 09:00', freq='H',
                            periods=10, tz=tz)
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = rng + one
        expected = pd.date_range('2000-01-01 10:00', freq='H',
                                 periods=10, tz=tz)
        tm.assert_index_equal(result, expected)

    def test_dti_iadd_int(self, tz_naive_fixture, one):
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01 09:00', freq='H',
                            periods=10, tz=tz)
        expected = pd.date_range('2000-01-01 10:00', freq='H',
                                 periods=10, tz=tz)
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            rng += one
        tm.assert_index_equal(rng, expected)

    def test_dti_sub_int(self, tz_naive_fixture, one):
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01 09:00', freq='H',
                            periods=10, tz=tz)
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = rng - one
        expected = pd.date_range('2000-01-01 08:00', freq='H',
                                 periods=10, tz=tz)
        tm.assert_index_equal(result, expected)

    def test_dti_isub_int(self, tz_naive_fixture, one):
        tz = tz_naive_fixture
        rng = pd.date_range('2000-01-01 09:00', freq='H',
                            periods=10, tz=tz)
        expected = pd.date_range('2000-01-01 08:00', freq='H',
                                 periods=10, tz=tz)
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            rng -= one
        tm.assert_index_equal(rng, expected)
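
    # In these tests the integer is interpreted in units of the index
    # frequency (here 'H', so +1 shifts every element by one hour); the
    # FutureWarning asserted above reflects that this integer arithmetic is
    # deprecated in favor of adding an explicit offset or timedelta.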

    # -------------------------------------------------------------
    # __add__/__sub__ with integer arrays

    @pytest.mark.parametrize('freq', ['H', 'D'])
    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
    def test_dti_add_intarray_tick(self, int_holder, freq):
        # GH#19959
        dti = pd.date_range('2016-01-01', periods=2, freq=freq)
        other = int_holder([4, -1])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            expected = DatetimeIndex([dti[n] + other[n]
                                      for n in range(len(dti))])
            result = dti + other
        tm.assert_index_equal(result, expected)

        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = other + dti
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize('freq', ['W', 'M', 'MS', 'Q'])
    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
    def test_dti_add_intarray_non_tick(self, int_holder, freq):
        # GH#19959
        dti = pd.date_range('2016-01-01', periods=2, freq=freq)
        other = int_holder([4, -1])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            expected = DatetimeIndex([dti[n] + other[n]
                                      for n in range(len(dti))])

        # tm.assert_produces_warning does not handle cases where we expect
        # two warnings, in this case PerformanceWarning and FutureWarning.
        # Until that is fixed, we don't catch either
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            result = dti + other
        tm.assert_index_equal(result, expected)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            result = other + dti
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
    def test_dti_add_intarray_no_freq(self, int_holder):
        # GH#19959
        dti = pd.DatetimeIndex(['2016-01-01', 'NaT', '2017-04-05 06:07:08'])
        other = int_holder([9, 4, -1])
        with pytest.raises(NullFrequencyError):
            dti + other
        with pytest.raises(NullFrequencyError):
            other + dti
        with pytest.raises(NullFrequencyError):
            dti - other
        with pytest.raises(TypeError):
            other - dti

    # -------------------------------------------------------------
    # Binary operations DatetimeIndex and TimedeltaIndex/array

    def test_dti_add_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        tdi = pd.timedelta_range('0 days', periods=10)
        expected = pd.date_range('2017-01-01', periods=10, tz=tz)

        # add with TimedeltaIndex
        result = dti + tdi
        tm.assert_index_equal(result, expected)

        result = tdi + dti
        tm.assert_index_equal(result, expected)

        # add with timedelta64 array
        result = dti + tdi.values
        tm.assert_index_equal(result, expected)

        result = tdi.values + dti
        tm.assert_index_equal(result, expected)

    def test_dti_iadd_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        tdi = pd.timedelta_range('0 days', periods=10)
        expected = pd.date_range('2017-01-01', periods=10, tz=tz)

        # iadd with TimedeltaIndex
        result = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        result += tdi
        tm.assert_index_equal(result, expected)

        result = pd.timedelta_range('0 days', periods=10)
        result += dti
        tm.assert_index_equal(result, expected)

        # iadd with timedelta64 array
        result = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        result += tdi.values
        tm.assert_index_equal(result, expected)

        result = pd.timedelta_range('0 days', periods=10)
        result += dti
        tm.assert_index_equal(result, expected)

    def test_dti_sub_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        tdi = pd.timedelta_range('0 days', periods=10)
        expected = pd.date_range('2017-01-01', periods=10, tz=tz, freq='-1D')

        # sub with TimedeltaIndex
        result = dti - tdi
        tm.assert_index_equal(result, expected)

        msg = 'cannot subtract .*TimedeltaArray'
        with pytest.raises(TypeError, match=msg):
            tdi - dti

        # sub with timedelta64 array
        result = dti - tdi.values
        tm.assert_index_equal(result, expected)

        msg = 'cannot subtract DatetimeArray from'
        with pytest.raises(TypeError, match=msg):
            tdi.values - dti

    def test_dti_isub_tdi(self, tz_naive_fixture):
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        tdi = pd.timedelta_range('0 days', periods=10)
        expected = pd.date_range('2017-01-01', periods=10, tz=tz, freq='-1D')

        # isub with TimedeltaIndex
        result = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        result -= tdi
        tm.assert_index_equal(result, expected)

        msg = 'cannot subtract .* from a TimedeltaArray'
        with pytest.raises(TypeError, match=msg):
            tdi -= dti

        # isub with timedelta64 array
        result = DatetimeIndex([Timestamp('2017-01-01', tz=tz)] * 10)
        result -= tdi.values
        tm.assert_index_equal(result, expected)

        msg = '|'.join(['cannot perform __neg__ with this index type:',
                        'ufunc subtract cannot use operands with types',
                        'cannot subtract DatetimeArray from'])
        with pytest.raises(TypeError, match=msg):
            tdi.values -= dti

    # -------------------------------------------------------------
    # Binary Operations DatetimeIndex and datetime-like
    # TODO: A couple other tests belong in this section. Move them in
    #  a PR where there isn't already a giant diff.

    @pytest.mark.parametrize('addend', [
        datetime(2011, 1, 1),
        DatetimeIndex(['2011-01-01', '2011-01-02']),
        DatetimeIndex(['2011-01-01', '2011-01-02']).tz_localize('US/Eastern'),
        np.datetime64('2011-01-01'),
        Timestamp('2011-01-01')
    ], ids=lambda x: type(x).__name__)
    @pytest.mark.parametrize('tz', [None, 'US/Eastern'])
    def test_add_datetimelike_and_dti(self, addend, tz):
        # GH#9631
        dti = DatetimeIndex(['2011-01-01', '2011-01-02']).tz_localize(tz)
        msg = ('cannot add DatetimeArray and {0}'
               .format(type(addend).__name__)).replace('DatetimeIndex',
                                                       'DatetimeArray')
        with pytest.raises(TypeError, match=msg):
            dti + addend
        with pytest.raises(TypeError, match=msg):
            addend + dti

    # -------------------------------------------------------------

    def test_sub_dti_dti(self):
        # previously performed setop (deprecated in 0.16.0), now changed to
        # return subtraction -> TimedeltaIndex (GH ...)
        dti = date_range('20130101', periods=3)
        dti_tz = date_range('20130101', periods=3).tz_localize('US/Eastern')
        dti_tz2 = date_range('20130101', periods=3).tz_localize('UTC')
        expected = TimedeltaIndex([0, 0, 0])

        result = dti - dti
        tm.assert_index_equal(result, expected)
        result = dti_tz - dti_tz
        tm.assert_index_equal(result, expected)

        with pytest.raises(TypeError):
            dti_tz - dti
        with pytest.raises(TypeError):
            dti - dti_tz
        with pytest.raises(TypeError):
            dti_tz - dti_tz2

        # isub
        dti -= dti
        tm.assert_index_equal(dti, expected)

        # different length raises ValueError
        dti1 = date_range('20130101', periods=3)
        dti2 = date_range('20130101', periods=4)
        with pytest.raises(ValueError):
            dti1 - dti2

        # NaN propagation
        dti1 = DatetimeIndex(['2012-01-01', np.nan, '2012-01-03'])
        dti2 = DatetimeIndex(['2012-01-02', '2012-01-03', np.nan])
        expected = TimedeltaIndex(['1 days', np.nan, np.nan])
        result = dti2 - dti1
        tm.assert_index_equal(result, expected)
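
    # To summarize the cases above: DatetimeIndex - DatetimeIndex is an
    # elementwise subtraction returning a TimedeltaIndex; mixing tz-naive
    # with tz-aware (or two different timezones) raises TypeError, mismatched
    # lengths raise ValueError, and NaT propagates into the result.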

    # -------------------------------------------------------------------
    # TODO: Most of this block is moved from series or frame tests, needs
    # cleanup, box-parametrization, and de-duplication

    @pytest.mark.parametrize('op', [operator.add, operator.sub])
    def test_timedelta64_equal_timedelta_supported_ops(self, op):
        ser = Series([Timestamp('20130301'),
                      Timestamp('20130228 23:00:00'),
                      Timestamp('20130228 22:00:00'),
                      Timestamp('20130228 21:00:00')])

        intervals = ['D', 'h', 'm', 's', 'us']

        # TODO: unused
        # npy16_mappings = {'D': 24 * 60 * 60 * 1000000,
        #                   'h': 60 * 60 * 1000000,
        #                   'm': 60 * 1000000,
        #                   's': 1000000,
        #                   'us': 1}

        def timedelta64(*args):
            return sum(starmap(np.timedelta64, zip(args, intervals)))

        for d, h, m, s, us in product(*([range(2)] * 5)):
            nptd = timedelta64(d, h, m, s, us)
            pytd = timedelta(days=d, hours=h, minutes=m, seconds=s,
                             microseconds=us)
            lhs = op(ser, nptd)
            rhs = op(ser, pytd)

            tm.assert_series_equal(lhs, rhs)
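
    # The timedelta64 helper above zips its positional arguments with the
    # 'D'/'h'/'m'/'s'/'us' unit codes and sums the resulting np.timedelta64
    # values, so each nptd is numerically equal to the corresponding
    # datetime.timedelta pytd; the loop then asserts both produce the same
    # Series result under addition and subtraction.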

    def test_ops_nat_mixed_datetime64_timedelta64(self):
        # GH#11349
        timedelta_series = Series([NaT, Timedelta('1s')])
        datetime_series = Series([NaT, Timestamp('19900315')])
        nat_series_dtype_timedelta = Series([NaT, NaT],
                                            dtype='timedelta64[ns]')
        nat_series_dtype_timestamp = Series([NaT, NaT], dtype='datetime64[ns]')
        single_nat_dtype_datetime = Series([NaT], dtype='datetime64[ns]')
        single_nat_dtype_timedelta = Series([NaT], dtype='timedelta64[ns]')

        # subtraction
        tm.assert_series_equal(datetime_series - single_nat_dtype_datetime,
                               nat_series_dtype_timedelta)

        tm.assert_series_equal(datetime_series - single_nat_dtype_timedelta,
                               nat_series_dtype_timestamp)
        tm.assert_series_equal(-single_nat_dtype_timedelta + datetime_series,
                               nat_series_dtype_timestamp)

        # without a Series wrapping the NaT, it is ambiguous
        # whether it is a datetime64 or timedelta64
        # defaults to interpreting it as timedelta64
        tm.assert_series_equal(nat_series_dtype_timestamp -
                               single_nat_dtype_datetime,
                               nat_series_dtype_timedelta)

        tm.assert_series_equal(nat_series_dtype_timestamp -
                               single_nat_dtype_timedelta,
                               nat_series_dtype_timestamp)
        tm.assert_series_equal(-single_nat_dtype_timedelta +
                               nat_series_dtype_timestamp,
                               nat_series_dtype_timestamp)

        with pytest.raises(TypeError):
            timedelta_series - single_nat_dtype_datetime

        # addition
        tm.assert_series_equal(nat_series_dtype_timestamp +
                               single_nat_dtype_timedelta,
                               nat_series_dtype_timestamp)
        tm.assert_series_equal(single_nat_dtype_timedelta +
                               nat_series_dtype_timestamp,
                               nat_series_dtype_timestamp)

        tm.assert_series_equal(nat_series_dtype_timestamp +
                               single_nat_dtype_timedelta,
                               nat_series_dtype_timestamp)
        tm.assert_series_equal(single_nat_dtype_timedelta +
                               nat_series_dtype_timestamp,
                               nat_series_dtype_timestamp)

        tm.assert_series_equal(nat_series_dtype_timedelta +
                               single_nat_dtype_datetime,
                               nat_series_dtype_timestamp)
        tm.assert_series_equal(single_nat_dtype_datetime +
                               nat_series_dtype_timedelta,
                               nat_series_dtype_timestamp)

    def test_ufunc_coercions(self):
        idx = date_range('2011-01-01', periods=3, freq='2D', name='x')

        delta = np.timedelta64(1, 'D')
        for result in [idx + delta, np.add(idx, delta)]:
            assert isinstance(result, DatetimeIndex)
            exp = date_range('2011-01-02', periods=3, freq='2D', name='x')
            tm.assert_index_equal(result, exp)
            assert result.freq == '2D'

        for result in [idx - delta, np.subtract(idx, delta)]:
            assert isinstance(result, DatetimeIndex)
            exp = date_range('2010-12-31', periods=3, freq='2D', name='x')
            tm.assert_index_equal(result, exp)
            assert result.freq == '2D'

        delta = np.array([np.timedelta64(1, 'D'), np.timedelta64(2, 'D'),
                          np.timedelta64(3, 'D')])
        for result in [idx + delta, np.add(idx, delta)]:
            assert isinstance(result, DatetimeIndex)
            exp = DatetimeIndex(['2011-01-02', '2011-01-05', '2011-01-08'],
                                freq='3D', name='x')
            tm.assert_index_equal(result, exp)
            assert result.freq == '3D'

        for result in [idx - delta, np.subtract(idx, delta)]:
            assert isinstance(result, DatetimeIndex)
            exp = DatetimeIndex(['2010-12-31', '2011-01-01', '2011-01-02'],
                                freq='D', name='x')
            tm.assert_index_equal(result, exp)
            assert result.freq == 'D'
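
    # The point of the ufunc checks above is that np.add/np.subtract on a
    # DatetimeIndex go through the same logic as the '+'/'-' operators, so
    # both spellings return a DatetimeIndex with the same values and an
    # appropriately recomputed freq (e.g. adding 1, 2, 3 days to a 2D-spaced
    # index yields a 3D-spaced one).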

    @pytest.mark.parametrize('names', [('foo', None, None),
                                       ('baz', 'bar', None),
                                       ('bar', 'bar', 'bar')])
    @pytest.mark.parametrize('tz', [None, 'America/Chicago'])
    def test_dti_add_series(self, tz, names):
        # GH#13905
        index = DatetimeIndex(['2016-06-28 05:30', '2016-06-28 05:31'],
                              tz=tz, name=names[0])
        ser = Series([Timedelta(seconds=5)] * 2,
                     index=index, name=names[1])
        expected = Series(index + Timedelta(seconds=5),
                          index=index, name=names[2])

        # passing name arg isn't enough when names[2] is None
        expected.name = names[2]
        assert expected.dtype == index.dtype

        result = ser + index
        tm.assert_series_equal(result, expected)

        result2 = index + ser
        tm.assert_series_equal(result2, expected)

        expected = index + Timedelta(seconds=5)
        result3 = ser.values + index
        tm.assert_index_equal(result3, expected)

        result4 = index + ser.values
        tm.assert_index_equal(result4, expected)

    @pytest.mark.parametrize('names', [(None, None, None),
                                       ('foo', 'bar', None),
                                       ('foo', 'foo', 'foo')])
    def test_dti_add_offset_index(self, tz_naive_fixture, names):
        # GH#18849, GH#19744
        tz = tz_naive_fixture
        dti = pd.date_range('2017-01-01', periods=2, tz=tz, name=names[0])
        other = pd.Index([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)],
                         name=names[1])

        with tm.assert_produces_warning(PerformanceWarning,
                                        clear=[pd.core.arrays.datetimelike]):
            res = dti + other
        expected = DatetimeIndex([dti[n] + other[n] for n in range(len(dti))],
                                 name=names[2], freq='infer')
        tm.assert_index_equal(res, expected)

        with tm.assert_produces_warning(PerformanceWarning,
                                        clear=[pd.core.arrays.datetimelike]):
            res2 = other + dti
        tm.assert_index_equal(res2, expected)
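
    # The `names` triples in these parametrizations encode pandas'
    # name-propagation rule: the result keeps a name only when both operands
    # share it (('foo', 'foo') -> 'foo') and drops it to None when the names
    # differ.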

    @pytest.mark.parametrize('names', [(None, None, None),
                                       ('foo', 'bar', None),
                                       ('foo', 'foo', 'foo')])
    def test_dti_sub_offset_index(self, tz_naive_fixture, names):
        # GH#18824, GH#19744
        tz = tz_naive_fixture
        dti = pd.date_range('2017-01-01', periods=2, tz=tz, name=names[0])
        other = pd.Index([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)],
                         name=names[1])

        with tm.assert_produces_warning(PerformanceWarning,
                                        clear=[pd.core.arrays.datetimelike]):
            res = dti - other
        expected = DatetimeIndex([dti[n] - other[n] for n in range(len(dti))],
                                 name=names[2], freq='infer')
        tm.assert_index_equal(res, expected)

    @pytest.mark.parametrize('names', [(None, None, None),
                                       ('foo', 'bar', None),
                                       ('foo', 'foo', 'foo')])
    def test_dti_with_offset_series(self, tz_naive_fixture, names):
        # GH#18849
        tz = tz_naive_fixture
        dti = pd.date_range('2017-01-01', periods=2, tz=tz, name=names[0])
        other = Series([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)],
                       name=names[1])

        expected_add = Series([dti[n] + other[n] for n in range(len(dti))],
                              name=names[2])

        with tm.assert_produces_warning(PerformanceWarning,
                                        clear=[pd.core.arrays.datetimelike]):
            res = dti + other
        tm.assert_series_equal(res, expected_add)

        with tm.assert_produces_warning(PerformanceWarning,
                                        clear=[pd.core.arrays.datetimelike]):
            res2 = other + dti
        tm.assert_series_equal(res2, expected_add)

        expected_sub = Series([dti[n] - other[n] for n in range(len(dti))],
                              name=names[2])

        with tm.assert_produces_warning(PerformanceWarning,
                                        clear=[pd.core.arrays.datetimelike]):
            res3 = dti - other
        tm.assert_series_equal(res3, expected_sub)


@pytest.mark.parametrize('years', [-1, 0, 1])
@pytest.mark.parametrize('months', [-2, 0, 2])
def test_shift_months(years, months):
    dti = DatetimeIndex([Timestamp('2000-01-05 00:15:00'),
                         Timestamp('2000-01-31 00:23:00'),
                         Timestamp('2000-01-01'),
                         Timestamp('2000-02-29'),
                         Timestamp('2000-12-31')])
    actual = DatetimeIndex(shift_months(dti.asi8, years * 12 + months))

    raw = [x + pd.offsets.DateOffset(years=years, months=months)
           for x in dti]
    expected = DatetimeIndex(raw)
    tm.assert_index_equal(actual, expected)
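
# The comparison above checks that the low-level shift_months helper, which
# operates on the index's int64 (.asi8) representation with a total month
# count of years * 12 + months, matches applying
# pd.offsets.DateOffset(years=..., months=...) to each Timestamp individually.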