# coding=utf-8
# pylint: disable-msg=E1101,W0612
from datetime import datetime, timedelta

import numpy as np
import pytest

from pandas._libs import iNaT
import pandas._libs.index as _index

from pandas.compat import lrange, range

import pandas as pd
from pandas import (
    DataFrame, DatetimeIndex, NaT, Series, Timestamp, date_range)
import pandas.util.testing as tm
from pandas.util.testing import (
    assert_almost_equal, assert_frame_equal, assert_series_equal)

"""
Also test support for datetime64[ns] in Series / DataFrame
"""
def test_fancy_getitem():
    # WOM-1FRI: weekly, anchored on the first Friday of each month
    dti = date_range(freq='WOM-1FRI', start=datetime(2005, 1, 1),
                     end=datetime(2010, 1, 1))

    s = Series(np.arange(len(dti)), index=dti)

    assert s[48] == 48
    assert s['1/2/2009'] == 48
    assert s['2009-1-2'] == 48
    assert s[datetime(2009, 1, 2)] == 48
    assert s[Timestamp(datetime(2009, 1, 2))] == 48
    with pytest.raises(KeyError, match=r"^'2009-1-3'$"):
        s['2009-1-3']
    assert_series_equal(s['3/6/2009':'2009-06-05'],
                        s[datetime(2009, 3, 6):datetime(2009, 6, 5)])


def test_fancy_setitem():
    dti = date_range(freq='WOM-1FRI', start=datetime(2005, 1, 1),
                     end=datetime(2010, 1, 1))

    s = Series(np.arange(len(dti)), index=dti)
    s[48] = -1
    assert s[48] == -1
    s['1/2/2009'] = -2
    assert s[48] == -2
    s['1/2/2009':'2009-06-05'] = -3
    assert (s[48:54] == -3).all()


def test_dti_snap():
    dti = DatetimeIndex(['1/1/2002', '1/2/2002', '1/3/2002', '1/4/2002',
                         '1/5/2002', '1/6/2002', '1/7/2002'], freq='D')

    res = dti.snap(freq='W-MON')
    exp = date_range('12/31/2001', '1/7/2002', freq='w-mon')
    exp = exp.repeat([3, 4])
    assert (res == exp).all()

    res = dti.snap(freq='B')
    exp = date_range('1/1/2002', '1/7/2002', freq='b')
    exp = exp.repeat([1, 1, 1, 2, 2])
    assert (res == exp).all()


def test_dti_reset_index_round_trip():
    dti = date_range(start='1/1/2001', end='6/1/2001', freq='D')
    d1 = DataFrame({'v': np.random.rand(len(dti))}, index=dti)
    d2 = d1.reset_index()
    assert d2.dtypes[0] == np.dtype('M8[ns]')
    d3 = d2.set_index('index')
    assert_frame_equal(d1, d3, check_names=False)

    # #2329
    stamp = datetime(2012, 11, 22)
    df = DataFrame([[stamp, 12.1]], columns=['Date', 'Value'])
    df = df.set_index('Date')

    assert df.index[0] == stamp
    assert df.reset_index()['Date'][0] == stamp


def test_series_set_value():
    # #1561
    dates = [datetime(2001, 1, 1), datetime(2001, 1, 2)]
    index = DatetimeIndex(dates)

    with tm.assert_produces_warning(FutureWarning,
                                    check_stacklevel=False):
        s = Series().set_value(dates[0], 1.)
    with tm.assert_produces_warning(FutureWarning,
                                    check_stacklevel=False):
        s2 = s.set_value(dates[1], np.nan)

    exp = Series([1., np.nan], index=index)
    assert_series_equal(s2, exp)

    # s = Series(index[:1], index[:1])
    # s2 = s.set_value(dates[1], index[1])
    # assert s2.values.dtype == 'M8[ns]'


@pytest.mark.slow
def test_slice_locs_indexerror():
    times = [datetime(2000, 1, 1) + timedelta(minutes=i * 10)
             for i in range(100000)]
    s = Series(lrange(100000), times)
    s.loc[datetime(1900, 1, 1):datetime(2100, 1, 1)]


def test_slicing_datetimes():
    # GH 7523

    # unique
    df = DataFrame(np.arange(4., dtype='float64'),
                   index=[datetime(2001, 1, i, 10, 00)
                          for i in [1, 2, 3, 4]])
    result = df.loc[datetime(2001, 1, 1, 10):]
    assert_frame_equal(result, df)
    result = df.loc[:datetime(2001, 1, 4, 10)]
    assert_frame_equal(result, df)
    result = df.loc[datetime(2001, 1, 1, 10):datetime(2001, 1, 4, 10)]
    assert_frame_equal(result, df)

    result = df.loc[datetime(2001, 1, 1, 11):]
    expected = df.iloc[1:]
    assert_frame_equal(result, expected)
    result = df.loc['20010101 11':]
    assert_frame_equal(result, expected)

    # duplicates
    df = pd.DataFrame(np.arange(5., dtype='float64'),
                      index=[datetime(2001, 1, i, 10, 00)
                             for i in [1, 2, 2, 3, 4]])

    result = df.loc[datetime(2001, 1, 1, 10):]
    assert_frame_equal(result, df)
    result = df.loc[:datetime(2001, 1, 4, 10)]
    assert_frame_equal(result, df)
    result = df.loc[datetime(2001, 1, 1, 10):datetime(2001, 1, 4, 10)]
    assert_frame_equal(result, df)

    result = df.loc[datetime(2001, 1, 1, 11):]
    expected = df.iloc[1:]
    assert_frame_equal(result, expected)
    result = df.loc['20010101 11':]
    assert_frame_equal(result, expected)


def test_frame_datetime64_duplicated():
    dates = date_range('2010-07-01', end='2010-08-05')

    tst = DataFrame({'symbol': 'AAA', 'date': dates})
    result = tst.duplicated(['date', 'symbol'])
    # no row should be flagged as a duplicate
    assert (~result).all()

    tst = DataFrame({'date': dates})
    result = tst.duplicated()
    assert (~result).all()


def test_getitem_setitem_datetime_tz_pytz():
    from pytz import timezone as tz
    from pandas import date_range

    N = 50
    # testing with timezone, GH #2785
    rng = date_range('1/1/1990', periods=N, freq='H', tz='US/Eastern')
    ts = Series(np.random.randn(N), index=rng)

    # also test Timestamp tz handling, GH #2789
    result = ts.copy()
    result["1990-01-01 09:00:00+00:00"] = 0
    result["1990-01-01 09:00:00+00:00"] = ts[4]
    assert_series_equal(result, ts)

    result = ts.copy()
    result["1990-01-01 03:00:00-06:00"] = 0
    result["1990-01-01 03:00:00-06:00"] = ts[4]
    assert_series_equal(result, ts)

    # repeat with datetimes
    result = ts.copy()
    result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = 0
    result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = ts[4]
    assert_series_equal(result, ts)

    result = ts.copy()

    # comparison dates with datetime MUST be localized!
    date = tz('US/Central').localize(datetime(1990, 1, 1, 3))
    result[date] = 0
    result[date] = ts[4]
    assert_series_equal(result, ts)
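

# Hedged aside (added illustration, not from the original suite): the test
# above builds its key with ``tz('US/Central').localize(...)`` because passing
# a pytz zone straight to the ``datetime`` constructor typically attaches the
# zone's earliest recorded (LMT) offset rather than the expected CST/CDT
# offset, so the resulting key would not line up with the index.
def test_pytz_localize_vs_constructor_sketch():
    from pytz import timezone
    central = timezone('US/Central')
    localized = central.localize(datetime(1990, 1, 1, 3))
    constructed = datetime(1990, 1, 1, 3, tzinfo=central)
    # same wall-clock time, but different UTC offsets -> different instants
    assert localized.utcoffset() != constructed.utcoffset()
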
def test_getitem_setitem_datetime_tz_dateutil():
    from dateutil.tz import tzutc
    from pandas._libs.tslibs.timezones import dateutil_gettz as gettz

    # handle special case for utc in dateutil
    tz = lambda x: tzutc() if x == 'UTC' else gettz(x)

    from pandas import date_range

    N = 50
    # testing with timezone, GH #2785
    rng = date_range('1/1/1990', periods=N, freq='H',
                     tz='America/New_York')
    ts = Series(np.random.randn(N), index=rng)

    # also test Timestamp tz handling, GH #2789
    result = ts.copy()
    result["1990-01-01 09:00:00+00:00"] = 0
    result["1990-01-01 09:00:00+00:00"] = ts[4]
    assert_series_equal(result, ts)

    result = ts.copy()
    result["1990-01-01 03:00:00-06:00"] = 0
    result["1990-01-01 03:00:00-06:00"] = ts[4]
    assert_series_equal(result, ts)

    # repeat with datetimes
    result = ts.copy()
    result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = 0
    result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = ts[4]
    assert_series_equal(result, ts)

    result = ts.copy()
    result[datetime(1990, 1, 1, 3, tzinfo=tz('America/Chicago'))] = 0
    result[datetime(1990, 1, 1, 3, tzinfo=tz('America/Chicago'))] = ts[4]
    assert_series_equal(result, ts)


def test_getitem_setitem_datetimeindex():
    N = 50
    # testing with timezone, GH #2785
    rng = date_range('1/1/1990', periods=N, freq='H', tz='US/Eastern')
    ts = Series(np.random.randn(N), index=rng)

    result = ts["1990-01-01 04:00:00"]
    expected = ts[4]
    assert result == expected

    result = ts.copy()
    result["1990-01-01 04:00:00"] = 0
    result["1990-01-01 04:00:00"] = ts[4]
    assert_series_equal(result, ts)

    result = ts["1990-01-01 04:00:00":"1990-01-01 07:00:00"]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    result = ts.copy()
    result["1990-01-01 04:00:00":"1990-01-01 07:00:00"] = 0
    result["1990-01-01 04:00:00":"1990-01-01 07:00:00"] = ts[4:8]
    assert_series_equal(result, ts)

    lb = "1990-01-01 04:00:00"
    rb = "1990-01-01 07:00:00"
    # GH#18435 strings get a pass from tzawareness compat
    result = ts[(ts.index >= lb) & (ts.index <= rb)]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    lb = "1990-01-01 04:00:00-0500"
    rb = "1990-01-01 07:00:00-0500"
    result = ts[(ts.index >= lb) & (ts.index <= rb)]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    # repeat all the above with naive datetimes
    result = ts[datetime(1990, 1, 1, 4)]
    expected = ts[4]
    assert result == expected

    result = ts.copy()
    result[datetime(1990, 1, 1, 4)] = 0
    result[datetime(1990, 1, 1, 4)] = ts[4]
    assert_series_equal(result, ts)

    result = ts[datetime(1990, 1, 1, 4):datetime(1990, 1, 1, 7)]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    result = ts.copy()
    result[datetime(1990, 1, 1, 4):datetime(1990, 1, 1, 7)] = 0
    result[datetime(1990, 1, 1, 4):datetime(1990, 1, 1, 7)] = ts[4:8]
    assert_series_equal(result, ts)

    lb = datetime(1990, 1, 1, 4)
    rb = datetime(1990, 1, 1, 7)
    msg = "Cannot compare tz-naive and tz-aware datetime-like objects"
    with pytest.raises(TypeError, match=msg):
        # tznaive vs tzaware comparison is invalid
        # see GH#18376, GH#18162
        ts[(ts.index >= lb) & (ts.index <= rb)]

    lb = pd.Timestamp(datetime(1990, 1, 1, 4)).tz_localize(rng.tzinfo)
    rb = pd.Timestamp(datetime(1990, 1, 1, 7)).tz_localize(rng.tzinfo)
    result = ts[(ts.index >= lb) & (ts.index <= rb)]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    result = ts[ts.index[4]]
    expected = ts[4]
    assert result == expected

    result = ts[ts.index[4:8]]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    result = ts.copy()
    result[ts.index[4:8]] = 0
    result[4:8] = ts[4:8]
    assert_series_equal(result, ts)

    # also test partial date slicing
    result = ts["1990-01-02"]
    expected = ts[24:48]
    assert_series_equal(result, expected)

    result = ts.copy()
    result["1990-01-02"] = 0
    result["1990-01-02"] = ts[24:48]
    assert_series_equal(result, ts)


def test_getitem_setitem_periodindex():
    from pandas import period_range

    N = 50
    rng = period_range('1/1/1990', periods=N, freq='H')
    ts = Series(np.random.randn(N), index=rng)

    result = ts["1990-01-01 04"]
    expected = ts[4]
    assert result == expected

    result = ts.copy()
    result["1990-01-01 04"] = 0
    result["1990-01-01 04"] = ts[4]
    assert_series_equal(result, ts)

    result = ts["1990-01-01 04":"1990-01-01 07"]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    result = ts.copy()
    result["1990-01-01 04":"1990-01-01 07"] = 0
    result["1990-01-01 04":"1990-01-01 07"] = ts[4:8]
    assert_series_equal(result, ts)

    lb = "1990-01-01 04"
    rb = "1990-01-01 07"
    result = ts[(ts.index >= lb) & (ts.index <= rb)]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    # GH 2782
    result = ts[ts.index[4]]
    expected = ts[4]
    assert result == expected

    result = ts[ts.index[4:8]]
    expected = ts[4:8]
    assert_series_equal(result, expected)

    result = ts.copy()
    result[ts.index[4:8]] = 0
    result[4:8] = ts[4:8]
    assert_series_equal(result, ts)


# FutureWarning from NumPy.
@pytest.mark.filterwarnings("ignore:Using a non-tuple:FutureWarning")
def test_getitem_median_slice_bug():
    index = date_range('20090415', '20090519', freq='2B')
    s = Series(np.random.randn(13), index=index)

    indexer = [slice(6, 7, None)]
    result = s[indexer]
    expected = s[indexer[0]]
    assert_series_equal(result, expected)


def test_datetime_indexing():
    from pandas import date_range

    index = date_range('1/1/2000', '1/7/2000')
    index = index.repeat(3)

    s = Series(len(index), index=index)
    stamp = Timestamp('1/8/2000')

    with pytest.raises(KeyError, match=r"^947289600000000000L?$"):
        s[stamp]
    s[stamp] = 0
    assert s[stamp] == 0

    # not monotonic
    s = Series(len(index), index=index)
    s = s[::-1]

    with pytest.raises(KeyError, match=r"^947289600000000000L?$"):
        s[stamp]
    s[stamp] = 0
    assert s[stamp] == 0


"""
test duplicates in time series
"""
@pytest.fixture(scope='module')
def dups():
    dates = [datetime(2000, 1, 2), datetime(2000, 1, 2),
             datetime(2000, 1, 2), datetime(2000, 1, 3),
             datetime(2000, 1, 3), datetime(2000, 1, 3),
             datetime(2000, 1, 4), datetime(2000, 1, 4),
             datetime(2000, 1, 4), datetime(2000, 1, 5)]

    return Series(np.random.randn(len(dates)), index=dates)


def test_constructor(dups):
    assert isinstance(dups, Series)
    assert isinstance(dups.index, DatetimeIndex)


def test_is_unique_monotonic(dups):
    assert not dups.index.is_unique


def test_index_unique(dups):
    uniques = dups.index.unique()
    expected = DatetimeIndex([datetime(2000, 1, 2), datetime(2000, 1, 3),
                              datetime(2000, 1, 4), datetime(2000, 1, 5)])
    assert uniques.dtype == 'M8[ns]'  # sanity
    tm.assert_index_equal(uniques, expected)
    assert dups.index.nunique() == 4

    # #2563
    assert isinstance(uniques, DatetimeIndex)

    dups_local = dups.index.tz_localize('US/Eastern')
    dups_local.name = 'foo'
    result = dups_local.unique()
    expected = DatetimeIndex(expected, name='foo')
    expected = expected.tz_localize('US/Eastern')
    assert result.tz is not None
    assert result.name == 'foo'
    tm.assert_index_equal(result, expected)

    # NaT, note this is excluded
    arr = [1370745748 + t for t in range(20)] + [iNaT]
    idx = DatetimeIndex(arr * 3)
    tm.assert_index_equal(idx.unique(), DatetimeIndex(arr))
    assert idx.nunique() == 20
    assert idx.nunique(dropna=False) == 21

    arr = [Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t)
           for t in range(20)] + [NaT]
    idx = DatetimeIndex(arr * 3)
    tm.assert_index_equal(idx.unique(), DatetimeIndex(arr))
    assert idx.nunique() == 20
    assert idx.nunique(dropna=False) == 21


def test_index_dupes_contains():
    d = datetime(2011, 12, 5, 20, 30)
    ix = DatetimeIndex([d, d])
    assert d in ix


def test_duplicate_dates_indexing(dups):
    ts = dups

    uniques = ts.index.unique()
    for date in uniques:
        result = ts[date]

        mask = ts.index == date
        total = (ts.index == date).sum()
        expected = ts[mask]
        if total > 1:
            assert_series_equal(result, expected)
        else:
            assert_almost_equal(result, expected[0])

        cp = ts.copy()
        cp[date] = 0
        expected = Series(np.where(mask, 0, ts), index=ts.index)
        assert_series_equal(cp, expected)

    with pytest.raises(KeyError, match=r"^947116800000000000L?$"):
        ts[datetime(2000, 1, 6)]

    # new index
    ts[datetime(2000, 1, 6)] = 0
    assert ts[datetime(2000, 1, 6)] == 0


def test_range_slice():
    idx = DatetimeIndex(['1/1/2000', '1/2/2000', '1/2/2000', '1/3/2000',
                         '1/4/2000'])

    ts = Series(np.random.randn(len(idx)), index=idx)

    result = ts['1/2/2000':]
    expected = ts[1:]
    assert_series_equal(result, expected)

    result = ts['1/2/2000':'1/3/2000']
    expected = ts[1:4]
    assert_series_equal(result, expected)


def test_groupby_average_dup_values(dups):
    result = dups.groupby(level=0).mean()
    expected = dups.groupby(dups.index).mean()
    assert_series_equal(result, expected)


def test_indexing_over_size_cutoff():
    import datetime
    # #1821

    old_cutoff = _index._SIZE_CUTOFF
    try:
        _index._SIZE_CUTOFF = 1000

        # create large list of non periodic datetime
        dates = []
        sec = datetime.timedelta(seconds=1)
        half_sec = datetime.timedelta(microseconds=500000)
        d = datetime.datetime(2011, 12, 5, 20, 30)
        n = 1100
        for i in range(n):
            dates.append(d)
            dates.append(d + sec)
            dates.append(d + sec + half_sec)
            dates.append(d + sec + sec + half_sec)
            d += 3 * sec

        # duplicate some values in the list
        duplicate_positions = np.random.randint(0, len(dates) - 1, 20)
        for p in duplicate_positions:
            dates[p + 1] = dates[p]

        df = DataFrame(np.random.randn(len(dates), 4),
                       index=dates,
                       columns=list('ABCD'))

        pos = n * 3
        timestamp = df.index[pos]
        assert timestamp in df.index

        # it works!
        df.loc[timestamp]
        assert len(df.loc[[timestamp]]) > 0
    finally:
        _index._SIZE_CUTOFF = old_cutoff


def test_indexing_unordered():
    # GH 2437
    rng = date_range(start='2011-01-01', end='2011-01-15')
    ts = Series(np.random.rand(len(rng)), index=rng)
    ts2 = pd.concat([ts[0:4], ts[-4:], ts[4:-4]])

    for t in ts.index:
        # TODO: unused?
        s = str(t)  # noqa

        expected = ts[t]
        result = ts2[t]
        assert expected == result

    # GH 3448 (ranges)
    def compare(slobj):
        result = ts2[slobj].copy()
        result = result.sort_index()
        expected = ts[slobj]
        assert_series_equal(result, expected)

    compare(slice('2011-01-01', '2011-01-15'))
    compare(slice('2010-12-30', '2011-01-15'))
    compare(slice('2011-01-01', '2011-01-16'))

    # partial ranges
    compare(slice('2011-01-01', '2011-01-6'))
    compare(slice('2011-01-06', '2011-01-8'))
    compare(slice('2011-01-06', '2011-01-12'))

    # single values
    result = ts2['2011'].sort_index()
    expected = ts['2011']
    assert_series_equal(result, expected)

    # diff freq
    rng = date_range(datetime(2005, 1, 1), periods=20, freq='M')
    ts = Series(np.arange(len(rng)), index=rng)
    ts = ts.take(np.random.permutation(20))

    result = ts['2005']
    for t in result.index:
        assert t.year == 2005


def test_indexing():
    idx = date_range("2001-1-1", periods=20, freq='M')
    ts = Series(np.random.rand(len(idx)), index=idx)

    # getting

    # GH 3070, make sure semantics work on Series/Frame
    expected = ts['2001']
    expected.name = 'A'

    df = DataFrame(dict(A=ts))
    result = df['2001']['A']
    assert_series_equal(expected, result)

    # setting
    ts['2001'] = 1
    expected = ts['2001']
    expected.name = 'A'

    df.loc['2001', 'A'] = 1

    result = df['2001']['A']
    assert_series_equal(expected, result)

    # GH3546 (not including times on the last day)
    idx = date_range(start='2013-05-31 00:00', end='2013-05-31 23:00',
                     freq='H')
    ts = Series(lrange(len(idx)), index=idx)
    expected = ts['2013-05']
    assert_series_equal(expected, ts)

    idx = date_range(start='2013-05-31 00:00', end='2013-05-31 23:59',
                     freq='S')
    ts = Series(lrange(len(idx)), index=idx)
    expected = ts['2013-05']
    assert_series_equal(expected, ts)

    idx = [Timestamp('2013-05-31 00:00'),
           Timestamp(datetime(2013, 5, 31, 23, 59, 59, 999999))]
    ts = Series(lrange(len(idx)), index=idx)
    expected = ts['2013']
    assert_series_equal(expected, ts)

    # GH14826, indexing with a seconds resolution string / datetime object
    df = DataFrame(np.random.rand(5, 5),
                   columns=['open', 'high', 'low', 'close', 'volume'],
                   index=date_range('2012-01-02 18:01:00',
                                    periods=5, tz='US/Central', freq='s'))
    expected = df.loc[[df.index[2]]]

    # this is a single date, so will raise
    with pytest.raises(KeyError, match=r"^'2012-01-02 18:01:02'$"):
        df['2012-01-02 18:01:02']
    msg = r"Timestamp\('2012-01-02 18:01:02-0600', tz='US/Central', freq='S'\)"
    with pytest.raises(KeyError, match=msg):
        df[df.index[2]]


"""
test NaT support
"""
def test_set_none_nan():
    series = Series(date_range('1/1/2000', periods=10))
    series[3] = None
    assert series[3] is NaT

    series[3:5] = None
    assert series[4] is NaT

    series[5] = np.nan
    assert series[5] is NaT

    series[5:7] = np.nan
    assert series[6] is NaT


def test_nat_operations():
    # GH 8617
    s = Series([0, pd.NaT], dtype='m8[ns]')
    exp = s[0]
    assert s.median() == exp
    assert s.min() == exp
    assert s.max() == exp


@pytest.mark.parametrize('method', ["round", "floor", "ceil"])
@pytest.mark.parametrize('freq', ["s", "5s", "min", "5min", "h", "5h"])
def test_round_nat(method, freq):
    # GH14940
    s = Series([pd.NaT])
    expected = Series(pd.NaT)
    round_method = getattr(s.dt, method)
    assert_series_equal(round_method(freq), expected)