datetimes.py

# -*- coding: utf-8 -*-
from datetime import datetime, time, timedelta
import textwrap
import warnings

import numpy as np
from pytz import utc

from pandas._libs import lib, tslib
from pandas._libs.tslibs import (
    NaT, Timestamp, ccalendar, conversion, fields, iNaT, normalize_date,
    resolution as libresolution, timezones)

import pandas.compat as compat
from pandas.errors import PerformanceWarning
from pandas.util._decorators import Appender

from pandas.core.dtypes.common import (
    _INT64_DTYPE, _NS_DTYPE, is_categorical_dtype, is_datetime64_dtype,
    is_datetime64_ns_dtype, is_datetime64tz_dtype, is_dtype_equal,
    is_extension_type, is_float_dtype, is_object_dtype, is_period_dtype,
    is_string_dtype, is_timedelta64_dtype, pandas_dtype)
from pandas.core.dtypes.dtypes import DatetimeTZDtype
from pandas.core.dtypes.generic import (
    ABCDataFrame, ABCIndexClass, ABCPandasArray, ABCSeries)
from pandas.core.dtypes.missing import isna

from pandas.core import ops
from pandas.core.algorithms import checked_add_with_arr
from pandas.core.arrays import datetimelike as dtl
from pandas.core.arrays._ranges import generate_regular_range
import pandas.core.common as com

from pandas.tseries.frequencies import get_period_alias, to_offset
from pandas.tseries.offsets import Day, Tick

_midnight = time(0, 0)

# TODO(GH-24559): Remove warning, int_as_wall_time parameter.
_i8_message = """
    Passing integer-dtype data and a timezone to DatetimeIndex. Integer values
    will be interpreted differently in a future version of pandas. Previously,
    these were viewed as datetime64[ns] values representing the wall time
    *in the specified timezone*. In the future, these will be viewed as
    datetime64[ns] values representing the wall time *in UTC*. This is similar
    to a nanosecond-precision UNIX epoch. To accept the future behavior, use

        pd.to_datetime(integer_data, utc=True).tz_convert(tz)

    To keep the previous behavior, use

        pd.to_datetime(integer_data).tz_localize(tz)
"""


def tz_to_dtype(tz):
    """
    Return a datetime64[ns] dtype appropriate for the given timezone.

    Parameters
    ----------
    tz : tzinfo or None

    Returns
    -------
    np.dtype or DatetimeTZDtype
    """
    if tz is None:
        return _NS_DTYPE
    else:
        return DatetimeTZDtype(tz=tz)
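
# Rough usage sketch for ``tz_to_dtype`` (illustrative only; exact reprs may
# differ slightly across numpy/pandas versions):
#
#     >>> import pytz
#     >>> tz_to_dtype(None)
#     dtype('<M8[ns]')
#     >>> tz_to_dtype(pytz.timezone('US/Eastern'))
#     datetime64[ns, US/Eastern]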


def _to_M8(key, tz=None):
    """
    Timestamp-like => dt64
    """
    if not isinstance(key, Timestamp):
        # this also converts strings
        key = Timestamp(key)
        if key.tzinfo is not None and tz is not None:
            # Don't tz_localize(None) if key is already tz-aware
            key = key.tz_convert(tz)
        else:
            key = key.tz_localize(tz)

    return np.int64(conversion.pydt_to_i8(key)).view(_NS_DTYPE)
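
# Minimal sketch of ``_to_M8``: a Timestamp-like scalar (including a string)
# becomes a single numpy datetime64[ns] value (illustrative):
#
#     >>> _to_M8('2019-01-01')
#     numpy.datetime64('2019-01-01T00:00:00.000000000')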


def _field_accessor(name, field, docstring=None):
    def f(self):
        values = self.asi8
        if self.tz is not None and not timezones.is_utc(self.tz):
            values = self._local_timestamps()

        if field in self._bool_ops:
            if field.endswith(('start', 'end')):
                freq = self.freq
                month_kw = 12
                if freq:
                    kwds = freq.kwds
                    month_kw = kwds.get('startingMonth', kwds.get('month', 12))

                result = fields.get_start_end_field(values, field,
                                                    self.freqstr, month_kw)
            else:
                result = fields.get_date_field(values, field)

            # these return a boolean by-definition
            return result

        if field in self._object_ops:
            result = fields.get_date_name_field(values, field)
            result = self._maybe_mask_results(result, fill_value=None)
        else:
            result = fields.get_date_field(values, field)
            result = self._maybe_mask_results(result, fill_value=None,
                                              convert='float64')

        return result

    f.__name__ = name
    f.__doc__ = "\n{}\n".format(docstring)
    return property(f)
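
# The factory above is used further down in this module, e.g.::
#
#     year = _field_accessor('year', 'Y', "The year of the datetime.")
#
# A generated property dispatches to ``fields.get_date_field`` (or
# ``get_start_end_field`` for the boolean ops) on the i8 values, so on a
# DatetimeArray it returns a plain ndarray (illustrative; the integer dtype
# of the result may vary by platform):
#
#     >>> arr = DatetimeArray._from_sequence(['2019-01-01', '2020-06-15'])
#     >>> arr.year
#     array([2019, 2020], dtype=int32)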


def _dt_array_cmp(cls, op):
    """
    Wrap comparison operations to convert datetime-like to datetime64
    """
    opname = '__{name}__'.format(name=op.__name__)
    nat_result = True if opname == '__ne__' else False

    def wrapper(self, other):
        if isinstance(other, (ABCDataFrame, ABCSeries, ABCIndexClass)):
            return NotImplemented

        other = lib.item_from_zerodim(other)

        if isinstance(other, (datetime, np.datetime64, compat.string_types)):
            if isinstance(other, (datetime, np.datetime64)):
                # GH#18435 strings get a pass from tzawareness compat
                self._assert_tzawareness_compat(other)

            try:
                other = _to_M8(other, tz=self.tz)
            except ValueError:
                # string that cannot be parsed to Timestamp
                return ops.invalid_comparison(self, other, op)

            result = op(self.asi8, other.view('i8'))
            if isna(other):
                result.fill(nat_result)
        elif lib.is_scalar(other) or np.ndim(other) == 0:
            return ops.invalid_comparison(self, other, op)
        elif len(other) != len(self):
            raise ValueError("Lengths must match")
        else:
            if isinstance(other, list):
                try:
                    other = type(self)._from_sequence(other)
                except ValueError:
                    other = np.array(other, dtype=np.object_)
            elif not isinstance(other, (np.ndarray, ABCIndexClass, ABCSeries,
                                        DatetimeArray)):
                # Following Timestamp convention, __eq__ is all-False
                # and __ne__ is all True, others raise TypeError.
                return ops.invalid_comparison(self, other, op)

            if is_object_dtype(other):
                # We have to use _comp_method_OBJECT_ARRAY instead of numpy
                # comparison otherwise it would fail to raise when
                # comparing tz-aware and tz-naive
                with np.errstate(all='ignore'):
                    result = ops._comp_method_OBJECT_ARRAY(op,
                                                           self.astype(object),
                                                           other)
                o_mask = isna(other)
            elif not (is_datetime64_dtype(other) or
                      is_datetime64tz_dtype(other)):
                # e.g. is_timedelta64_dtype(other)
                return ops.invalid_comparison(self, other, op)
            else:
                self._assert_tzawareness_compat(other)
                if isinstance(other, (ABCIndexClass, ABCSeries)):
                    other = other.array

                if (is_datetime64_dtype(other) and
                        not is_datetime64_ns_dtype(other) or
                        not hasattr(other, 'asi8')):
                    # e.g. other.dtype == 'datetime64[s]'
                    # or an object-dtype ndarray
                    other = type(self)._from_sequence(other)
                result = op(self.view('i8'), other.view('i8'))
                o_mask = other._isnan

            result = com.values_from_object(result)

            # Make sure to pass an array to result[...]; indexing with
            # Series breaks with older version of numpy
            o_mask = np.array(o_mask)
            if o_mask.any():
                result[o_mask] = nat_result

        if self._hasnans:
            result[self._isnan] = nat_result

        return result

    return compat.set_function_name(wrapper, opname, cls)
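
# Illustrative behavior of the generated comparison methods: NaT positions
# compare as False for ``==`` and True for ``!=``:
#
#     >>> arr = DatetimeArray._from_sequence(['2019-01-01', 'NaT'])
#     >>> arr == '2019-01-01'
#     array([ True, False])
#     >>> arr != '2019-01-01'
#     array([False,  True])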


class DatetimeArray(dtl.DatetimeLikeArrayMixin,
                    dtl.TimelikeOps,
                    dtl.DatelikeOps):
    """
    Pandas ExtensionArray for tz-naive or tz-aware datetime data.

    .. versionadded:: 0.24.0

    .. warning::

       DatetimeArray is currently experimental, and its API may change
       without warning. In particular, :attr:`DatetimeArray.dtype` is
       expected to change to always be an instance of an ``ExtensionDtype``
       subclass.

    Parameters
    ----------
    values : Series, Index, DatetimeArray, ndarray
        The datetime data.

        For DatetimeArray `values` (or a Series or Index boxing one),
        `dtype` and `freq` will be extracted from `values`.
    dtype : numpy.dtype or DatetimeTZDtype
        Note that the only NumPy dtype allowed is 'datetime64[ns]'.
    freq : str or Offset, optional
    copy : bool, default False
        Whether to copy the underlying array of values.
    """
    _typ = "datetimearray"
    _scalar_type = Timestamp

    # define my properties & methods for delegation
    _bool_ops = ['is_month_start', 'is_month_end',
                 'is_quarter_start', 'is_quarter_end', 'is_year_start',
                 'is_year_end', 'is_leap_year']
    _object_ops = ['weekday_name', 'freq', 'tz']
    _field_ops = ['year', 'month', 'day', 'hour', 'minute', 'second',
                  'weekofyear', 'week', 'weekday', 'dayofweek',
                  'dayofyear', 'quarter', 'days_in_month',
                  'daysinmonth', 'microsecond',
                  'nanosecond']
    _other_ops = ['date', 'time', 'timetz']
    _datetimelike_ops = _field_ops + _object_ops + _bool_ops + _other_ops
    _datetimelike_methods = ['to_period', 'tz_localize',
                             'tz_convert',
                             'normalize', 'strftime', 'round', 'floor',
                             'ceil', 'month_name', 'day_name']

    # dummy attribute so that datetime.__eq__(DatetimeArray) defers
    # by returning NotImplemented
    timetuple = None

    # Needed so that Timestamp.__richcmp__(DatetimeArray) operates pointwise
    ndim = 1

    # ensure that operations with numpy arrays defer to our implementation
    __array_priority__ = 1000

    # -----------------------------------------------------------------
    # Constructors

    _attributes = ["freq", "tz"]
    _dtype = None  # type: Union[np.dtype, DatetimeTZDtype]
    _freq = None

    def __init__(self, values, dtype=_NS_DTYPE, freq=None, copy=False):
        if isinstance(values, (ABCSeries, ABCIndexClass)):
            values = values._values

        inferred_freq = getattr(values, "_freq", None)

        if isinstance(values, type(self)):
            # validation
            dtz = getattr(dtype, 'tz', None)
            if dtz and values.tz is None:
                dtype = DatetimeTZDtype(tz=dtype.tz)
            elif dtz and values.tz:
                if not timezones.tz_compare(dtz, values.tz):
                    msg = (
                        "Timezone of the array and 'dtype' do not match. "
                        "'{}' != '{}'"
                    )
                    raise TypeError(msg.format(dtz, values.tz))
            elif values.tz:
                dtype = values.dtype
            # freq = validate_values_freq(values, freq)
            if freq is None:
                freq = values.freq
            values = values._data

        if not isinstance(values, np.ndarray):
            msg = (
                "Unexpected type '{}'. 'values' must be a DatetimeArray, "
                "ndarray, or Series or Index containing one of those."
            )
            raise ValueError(msg.format(type(values).__name__))

        if values.dtype == 'i8':
            # for compat with datetime/timedelta/period shared methods,
            # we can sometimes get here with int64 values. These represent
            # nanosecond UTC (or tz-naive) unix timestamps
            values = values.view(_NS_DTYPE)

        if values.dtype != _NS_DTYPE:
            msg = (
                "The dtype of 'values' is incorrect. Must be 'datetime64[ns]'."
                " Got {} instead."
            )
            raise ValueError(msg.format(values.dtype))

        dtype = _validate_dt64_dtype(dtype)

        if freq == "infer":
            msg = (
                "Frequency inference not allowed in DatetimeArray.__init__. "
                "Use 'pd.array()' instead."
            )
            raise ValueError(msg)

        if copy:
            values = values.copy()
        if freq:
            freq = to_offset(freq)
        if getattr(dtype, 'tz', None):
            # https://github.com/pandas-dev/pandas/issues/18595
            # Ensure that we have a standard timezone for pytz objects.
            # Without this, things like adding an array of timedeltas and
            # a tz-aware Timestamp (with a tz specific to its datetime) will
            # be incorrect(ish?) for the array as a whole
            dtype = DatetimeTZDtype(tz=timezones.tz_standardize(dtype.tz))

        self._data = values
        self._dtype = dtype
        self._freq = freq

        if inferred_freq is None and freq is not None:
            type(self)._validate_frequency(self, freq)
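
    # Constructor sketch (illustrative): a datetime64[ns] ndarray, plus an
    # optional DatetimeTZDtype for tz-aware data.
    #
    #     >>> values = np.array(['2019-01-01', '2019-01-02'],
    #     ...                   dtype='datetime64[ns]')
    #     >>> DatetimeArray(values)                                # tz-naive
    #     >>> DatetimeArray(values, dtype=DatetimeTZDtype(tz='UTC'))  # aware
    #
    # Note that ``freq='infer'`` is rejected here; use ``pd.array`` or
    # ``_from_sequence`` when inference is wanted.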

    @classmethod
    def _simple_new(cls, values, freq=None, dtype=_NS_DTYPE):
        assert isinstance(values, np.ndarray)
        if values.dtype == 'i8':
            values = values.view(_NS_DTYPE)

        result = object.__new__(cls)
        result._data = values
        result._freq = freq
        result._dtype = dtype
        return result

    @classmethod
    def _from_sequence(cls, data, dtype=None, copy=False,
                       tz=None, freq=None,
                       dayfirst=False, yearfirst=False, ambiguous='raise',
                       int_as_wall_time=False):

        freq, freq_infer = dtl.maybe_infer_freq(freq)

        subarr, tz, inferred_freq = sequence_to_dt64ns(
            data, dtype=dtype, copy=copy, tz=tz,
            dayfirst=dayfirst, yearfirst=yearfirst,
            ambiguous=ambiguous, int_as_wall_time=int_as_wall_time)

        freq, freq_infer = dtl.validate_inferred_freq(freq, inferred_freq,
                                                      freq_infer)

        dtype = tz_to_dtype(tz)
        result = cls._simple_new(subarr, freq=freq, dtype=dtype)

        if inferred_freq is None and freq is not None:
            # this condition precludes `freq_infer`
            cls._validate_frequency(result, freq, ambiguous=ambiguous)

        elif freq_infer:
            # Set _freq directly to bypass duplicative _validate_frequency
            # check.
            result._freq = to_offset(result.inferred_freq)

        return result
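
    # Typical construction sketch: strings or datetimes in, DatetimeArray
    # out, with optional tz localization and frequency inference
    # (illustrative):
    #
    #     >>> DatetimeArray._from_sequence(['2019-01-01', '2019-01-02'],
    #     ...                              tz='US/Eastern', freq='infer')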

    @classmethod
    def _generate_range(cls, start, end, periods, freq, tz=None,
                        normalize=False, ambiguous='raise',
                        nonexistent='raise', closed=None):

        periods = dtl.validate_periods(periods)
        if freq is None and any(x is None for x in [periods, start, end]):
            raise ValueError('Must provide freq argument if no data is '
                             'supplied')

        if com.count_not_none(start, end, periods, freq) != 3:
            raise ValueError('Of the four parameters: start, end, periods, '
                             'and freq, exactly three must be specified')
        freq = to_offset(freq)

        if start is not None:
            start = Timestamp(start)

        if end is not None:
            end = Timestamp(end)

        if start is None and end is None:
            if closed is not None:
                raise ValueError("Closed has to be None if not both of start "
                                 "and end are defined")
        if start is NaT or end is NaT:
            raise ValueError("Neither `start` nor `end` can be NaT")

        left_closed, right_closed = dtl.validate_endpoints(closed)
        start, end, _normalized = _maybe_normalize_endpoints(start, end,
                                                             normalize)
        tz = _infer_tz_from_endpoints(start, end, tz)

        if tz is not None:
            # Localize the start and end arguments
            start = _maybe_localize_point(
                start, getattr(start, 'tz', None), start, freq, tz
            )
            end = _maybe_localize_point(
                end, getattr(end, 'tz', None), end, freq, tz
            )
        if freq is not None:
            # We break Day arithmetic (fixed 24 hour) here and opt for
            # Day to mean calendar day (23/24/25 hour). Therefore, strip
            # tz info from start and end to avoid DST arithmetic
            if isinstance(freq, Day):
                if start is not None:
                    start = start.tz_localize(None)
                if end is not None:
                    end = end.tz_localize(None)
            # TODO: consider re-implementing _cached_range; GH#17914
            values, _tz = generate_regular_range(start, end, periods, freq)
            index = cls._simple_new(values, freq=freq, dtype=tz_to_dtype(_tz))

            if tz is not None and index.tz is None:
                arr = conversion.tz_localize_to_utc(
                    index.asi8,
                    tz, ambiguous=ambiguous, nonexistent=nonexistent)

                index = cls(arr)

                # index is localized datetime64 array -> have to convert
                # start/end as well to compare
                if start is not None:
                    start = start.tz_localize(tz).asm8
                if end is not None:
                    end = end.tz_localize(tz).asm8
        else:
            # Create a linearly spaced date_range in local time
            # Nanosecond-granularity timestamps aren't always correctly
            # representable with doubles, so we limit the range that we
            # pass to np.linspace as much as possible
            arr = np.linspace(
                0, end.value - start.value,
                periods, dtype='int64') + start.value
            dtype = tz_to_dtype(tz)
            index = cls._simple_new(
                arr.astype('M8[ns]', copy=False), freq=None, dtype=dtype
            )

        if not left_closed and len(index) and index[0] == start:
            index = index[1:]
        if not right_closed and len(index) and index[-1] == end:
            index = index[:-1]

        dtype = tz_to_dtype(tz)
        return cls._simple_new(index.asi8, freq=freq, dtype=dtype)
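
    # Range-construction sketch (this is the array-level workhorse behind
    # ``pd.date_range``): exactly three of start/end/periods/freq must be
    # specified (illustrative):
    #
    #     >>> DatetimeArray._generate_range(start='2019-01-01', end=None,
    #     ...                               periods=3, freq='D')
    #
    # which produces a 3-element daily, tz-naive DatetimeArray.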

    # -----------------------------------------------------------------
    # DatetimeLike Interface

    def _unbox_scalar(self, value):
        if not isinstance(value, self._scalar_type) and value is not NaT:
            raise ValueError("'value' should be a Timestamp.")
        if not isna(value):
            self._check_compatible_with(value)
        return value.value

    def _scalar_from_string(self, value):
        return Timestamp(value, tz=self.tz)

    def _check_compatible_with(self, other):
        if other is NaT:
            return
        if not timezones.tz_compare(self.tz, other.tz):
            raise ValueError("Timezones don't match. '{own} != {other}'"
                             .format(own=self.tz, other=other.tz))

    def _maybe_clear_freq(self):
        self._freq = None

    # -----------------------------------------------------------------
    # Descriptive Properties

    @property
    def _box_func(self):
        return lambda x: Timestamp(x, freq=self.freq, tz=self.tz)

    @property
    def dtype(self):
        # type: () -> Union[np.dtype, DatetimeTZDtype]
        """
        The dtype for the DatetimeArray.

        .. warning::

           A future version of pandas will change dtype to never be a
           ``numpy.dtype``. Instead, :attr:`DatetimeArray.dtype` will
           always be an instance of an ``ExtensionDtype`` subclass.

        Returns
        -------
        numpy.dtype or DatetimeTZDtype
            If the values are tz-naive, then ``np.dtype('datetime64[ns]')``
            is returned.

            If the values are tz-aware, then the ``DatetimeTZDtype``
            is returned.
        """
        return self._dtype

    @property
    def tz(self):
        """
        Return timezone, if any.

        Returns
        -------
        datetime.tzinfo, pytz.tzinfo.BaseTZInfo, dateutil.tz.tz.tzfile, or None
            Returns None when the array is tz-naive.
        """
        # GH 18595
        return getattr(self.dtype, "tz", None)

    @tz.setter
    def tz(self, value):
        # GH 3746: Prevent localizing or converting the index by setting tz
        raise AttributeError("Cannot directly set timezone. Use tz_localize() "
                             "or tz_convert() as appropriate")

    @property
    def tzinfo(self):
        """
        Alias for tz attribute
        """
        return self.tz

    @property  # NB: override with cache_readonly in immutable subclasses
    def _timezone(self):
        """
        Comparable timezone both for pytz / dateutil
        """
        return timezones.get_timezone(self.tzinfo)

    @property  # NB: override with cache_readonly in immutable subclasses
    def is_normalized(self):
        """
        Returns True if all of the dates are at midnight ("no time")
        """
        return conversion.is_date_array_normalized(self.asi8, self.tz)

    @property  # NB: override with cache_readonly in immutable subclasses
    def _resolution(self):
        return libresolution.resolution(self.asi8, self.tz)

    # ----------------------------------------------------------------
    # Array-Like / EA-Interface Methods

    def __array__(self, dtype=None):
        if dtype is None and self.tz:
            # The default for tz-aware is object, to preserve tz info
            dtype = object
        return super(DatetimeArray, self).__array__(dtype=dtype)

    def __iter__(self):
        """
        Return an iterator over the boxed values

        Yields
        ------
        tstamp : Timestamp
        """
        # convert in chunks of 10k for efficiency
        data = self.asi8
        length = len(self)
        chunksize = 10000
        chunks = int(length / chunksize) + 1
        for i in range(chunks):
            start_i = i * chunksize
            end_i = min((i + 1) * chunksize, length)
            converted = tslib.ints_to_pydatetime(data[start_i:end_i],
                                                 tz=self.tz, freq=self.freq,
                                                 box="timestamp")
            for v in converted:
                yield v
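
    # Iteration yields scalar Timestamps, converted lazily in 10k-element
    # chunks (illustrative):
    #
    #     >>> arr = DatetimeArray._from_sequence(['2019-01-01', '2019-01-02'])
    #     >>> list(arr)
    #     [Timestamp('2019-01-01 00:00:00'), Timestamp('2019-01-02 00:00:00')]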

    def astype(self, dtype, copy=True):
        # We handle
        #   --> datetime
        #   --> period
        # DatetimeLikeArrayMixin Super handles the rest.
        dtype = pandas_dtype(dtype)

        if (is_datetime64_ns_dtype(dtype) and
                not is_dtype_equal(dtype, self.dtype)):
            # GH#18951: datetime64_ns dtype but not equal means different tz
            new_tz = getattr(dtype, 'tz', None)
            if getattr(self.dtype, 'tz', None) is None:
                return self.tz_localize(new_tz)
            result = self.tz_convert(new_tz)
            if new_tz is None:
                # Do we want .astype('datetime64[ns]') to be an ndarray.
                # The astype in Block._astype expects this to return an
                # ndarray, but we could maybe work around it there.
                result = result._data
            return result
        elif is_datetime64tz_dtype(self.dtype) and is_dtype_equal(self.dtype,
                                                                  dtype):
            if copy:
                return self.copy()
            return self
        elif is_period_dtype(dtype):
            return self.to_period(freq=dtype.freq)
        return dtl.DatetimeLikeArrayMixin.astype(self, dtype, copy)
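
    # astype sketch: converting a tz-naive array to a tz-aware datetime64[ns]
    # dtype routes through tz_localize, and a period dtype routes through
    # to_period (illustrative):
    #
    #     >>> arr = DatetimeArray._from_sequence(['2019-01-01', '2019-01-02'])
    #     >>> arr.astype('datetime64[ns, UTC]')   # -> arr.tz_localize('UTC')
    #     >>> arr.astype('period[D]')             # -> arr.to_period('D')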

    # ----------------------------------------------------------------
    # ExtensionArray Interface

    @Appender(dtl.DatetimeLikeArrayMixin._validate_fill_value.__doc__)
    def _validate_fill_value(self, fill_value):
        if isna(fill_value):
            fill_value = iNaT
        elif isinstance(fill_value, (datetime, np.datetime64)):
            self._assert_tzawareness_compat(fill_value)
            fill_value = Timestamp(fill_value).value
        else:
            raise ValueError("'fill_value' should be a Timestamp. "
                             "Got '{got}'.".format(got=fill_value))
        return fill_value

    # -----------------------------------------------------------------
    # Rendering Methods

    def _format_native_types(self, na_rep='NaT', date_format=None, **kwargs):
        from pandas.io.formats.format import _get_format_datetime64_from_values
        fmt = _get_format_datetime64_from_values(self, date_format)

        return tslib.format_array_from_datetime(self.asi8,
                                                tz=self.tz,
                                                format=fmt,
                                                na_rep=na_rep)

    # -----------------------------------------------------------------
    # Comparison Methods

    _create_comparison_method = classmethod(_dt_array_cmp)

    def _has_same_tz(self, other):
        zzone = self._timezone

        # vzone shouldn't be None if value is non-datetime like
        if isinstance(other, np.datetime64):
            # convert to Timestamp as np.datetime64 doesn't have tz attr
            other = Timestamp(other)
        vzone = timezones.get_timezone(getattr(other, 'tzinfo', '__no_tz__'))
        return zzone == vzone

    def _assert_tzawareness_compat(self, other):
        # adapted from _Timestamp._assert_tzawareness_compat
        other_tz = getattr(other, 'tzinfo', None)
        if is_datetime64tz_dtype(other):
            # Get tzinfo from Series dtype
            other_tz = other.dtype.tz
        if other is NaT:
            # pd.NaT quacks both aware and naive
            pass
        elif self.tz is None:
            if other_tz is not None:
                raise TypeError('Cannot compare tz-naive and tz-aware '
                                'datetime-like objects.')
        elif other_tz is None:
            raise TypeError('Cannot compare tz-naive and tz-aware '
                            'datetime-like objects')

    # -----------------------------------------------------------------
    # Arithmetic Methods

    def _sub_datetime_arraylike(self, other):
        """subtract DatetimeArray/Index or ndarray[datetime64]"""
        if len(self) != len(other):
            raise ValueError("cannot add indices of unequal length")

        if isinstance(other, np.ndarray):
            assert is_datetime64_dtype(other)
            other = type(self)(other)

        if not self._has_same_tz(other):
            # require tz compat
            raise TypeError("{cls} subtraction must have the same "
                            "timezones or no timezones"
                            .format(cls=type(self).__name__))

        self_i8 = self.asi8
        other_i8 = other.asi8
        arr_mask = self._isnan | other._isnan
        new_values = checked_add_with_arr(self_i8, -other_i8,
                                          arr_mask=arr_mask)
        if self._hasnans or other._hasnans:
            new_values[arr_mask] = iNaT
        return new_values.view('timedelta64[ns]')
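
    # Datetime - datetime subtraction returns an ndarray of timedelta64[ns];
    # positions that are NaT on either side come out as NaT (illustrative):
    #
    #     >>> a = DatetimeArray._from_sequence(['2019-01-02', 'NaT'])
    #     >>> b = DatetimeArray._from_sequence(['2019-01-01', '2019-01-01'])
    #     >>> a._sub_datetime_arraylike(b)    # one day, then NaT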

    def _add_offset(self, offset):
        assert not isinstance(offset, Tick)
        try:
            if self.tz is not None:
                values = self.tz_localize(None)
            else:
                values = self
            result = offset.apply_index(values)
            if self.tz is not None:
                result = result.tz_localize(self.tz)

        except NotImplementedError:
            warnings.warn("Non-vectorized DateOffset being applied to Series "
                          "or DatetimeIndex", PerformanceWarning)
            result = self.astype('O') + offset

        return type(self)._from_sequence(result, freq='infer')

    def _sub_datetimelike_scalar(self, other):
        # subtract a datetime from myself, yielding a ndarray[timedelta64[ns]]
        assert isinstance(other, (datetime, np.datetime64))
        assert other is not NaT
        other = Timestamp(other)
        if other is NaT:
            return self - NaT

        if not self._has_same_tz(other):
            # require tz compat
            raise TypeError("Timestamp subtraction must have the same "
                            "timezones or no timezones")

        i8 = self.asi8
        result = checked_add_with_arr(i8, -other.value,
                                      arr_mask=self._isnan)
        result = self._maybe_mask_results(result)
        return result.view('timedelta64[ns]')

    def _add_delta(self, delta):
        """
        Add a timedelta-like, Tick, or TimedeltaIndex-like object
        to self, yielding a new DatetimeArray.

        Parameters
        ----------
        delta : {timedelta, np.timedelta64, Tick,
                 TimedeltaIndex, ndarray[timedelta64]}

        Returns
        -------
        result : DatetimeArray
        """
        new_values = super(DatetimeArray, self)._add_delta(delta)
        return type(self)._from_sequence(new_values, tz=self.tz, freq='infer')

    # -----------------------------------------------------------------
    # Timezone Conversion and Localization Methods

    def _local_timestamps(self):
        """
        Convert to an i8 (unix-like nanosecond timestamp) representation
        while keeping the local timezone and not using UTC.
        This is used to calculate time-of-day information as if the timestamps
        were timezone-naive.
        """
        return conversion.tz_convert(self.asi8, utc, self.tz)
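
    # Why _local_timestamps exists: the stored i8 values are UTC nanoseconds,
    # so field access on a tz-aware array first shifts them to wall-time
    # integers (illustrative):
    #
    #     >>> aware = DatetimeArray._from_sequence(['2019-01-01 23:00'],
    #     ...                                      tz='US/Eastern')
    #     >>> aware.hour    # 23 (wall time), not the UTC hour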

    def tz_convert(self, tz):
        """
        Convert tz-aware Datetime Array/Index from one time zone to another.

        Parameters
        ----------
        tz : string, pytz.timezone, dateutil.tz.tzfile or None
            Time zone for time. Corresponding timestamps would be converted
            to this time zone of the Datetime Array/Index. A `tz` of None will
            convert to UTC and remove the timezone information.

        Returns
        -------
        normalized : same type as self

        Raises
        ------
        TypeError
            If Datetime Array/Index is tz-naive.

        See Also
        --------
        DatetimeIndex.tz : A timezone that has a variable offset from UTC.
        DatetimeIndex.tz_localize : Localize tz-naive DatetimeIndex to a
            given time zone, or remove timezone from a tz-aware DatetimeIndex.

        Examples
        --------
        With the `tz` parameter, we can change the DatetimeIndex
        to other time zones:

        >>> dti = pd.date_range(start='2014-08-01 09:00',
        ...                     freq='H', periods=3, tz='Europe/Berlin')
        >>> dti
        DatetimeIndex(['2014-08-01 09:00:00+02:00',
                       '2014-08-01 10:00:00+02:00',
                       '2014-08-01 11:00:00+02:00'],
                      dtype='datetime64[ns, Europe/Berlin]', freq='H')

        >>> dti.tz_convert('US/Central')
        DatetimeIndex(['2014-08-01 02:00:00-05:00',
                       '2014-08-01 03:00:00-05:00',
                       '2014-08-01 04:00:00-05:00'],
                      dtype='datetime64[ns, US/Central]', freq='H')

        With ``tz=None``, we can remove the timezone (after converting
        to UTC if necessary):

        >>> dti = pd.date_range(start='2014-08-01 09:00', freq='H',
        ...                     periods=3, tz='Europe/Berlin')
        >>> dti
        DatetimeIndex(['2014-08-01 09:00:00+02:00',
                       '2014-08-01 10:00:00+02:00',
                       '2014-08-01 11:00:00+02:00'],
                      dtype='datetime64[ns, Europe/Berlin]', freq='H')

        >>> dti.tz_convert(None)
        DatetimeIndex(['2014-08-01 07:00:00',
                       '2014-08-01 08:00:00',
                       '2014-08-01 09:00:00'],
                      dtype='datetime64[ns]', freq='H')
        """
        tz = timezones.maybe_get_tz(tz)

        if self.tz is None:
            # tz naive, use tz_localize
            raise TypeError('Cannot convert tz-naive timestamps, use '
                            'tz_localize to localize')

        # No conversion since timestamps are all UTC to begin with
        dtype = tz_to_dtype(tz)
        return self._simple_new(self.asi8, dtype=dtype, freq=self.freq)

    def tz_localize(self, tz, ambiguous='raise', nonexistent='raise',
                    errors=None):
        """
        Localize tz-naive Datetime Array/Index to tz-aware
        Datetime Array/Index.

        This method takes a time zone (tz) naive Datetime Array/Index object
        and makes this time zone aware. It does not move the time to another
        time zone.
        Time zone localization helps to switch from time zone unaware to time
        zone aware objects.

        Parameters
        ----------
        tz : string, pytz.timezone, dateutil.tz.tzfile or None
            Time zone to convert timestamps to. Passing ``None`` will
            remove the time zone information preserving local time.
        ambiguous : 'infer', 'NaT', bool array, default 'raise'
            When clocks moved backward due to DST, ambiguous times may arise.
            For example in Central European Time (UTC+01), when going from
            03:00 DST to 02:00 non-DST, 02:30:00 local time occurs both at
            00:30:00 UTC and at 01:30:00 UTC. In such a situation, the
            `ambiguous` parameter dictates how ambiguous times should be
            handled.

            - 'infer' will attempt to infer fall dst-transition hours based on
              order
            - bool-ndarray where True signifies a DST time, False signifies a
              non-DST time (note that this flag is only applicable for
              ambiguous times)
            - 'NaT' will return NaT where there are ambiguous times
            - 'raise' will raise an AmbiguousTimeError if there are ambiguous
              times

        nonexistent : 'shift_forward', 'shift_backward', 'NaT', timedelta, \
default 'raise'
            A nonexistent time does not exist in a particular timezone
            where clocks moved forward due to DST.

            - 'shift_forward' will shift the nonexistent time forward to the
              closest existing time
            - 'shift_backward' will shift the nonexistent time backward to the
              closest existing time
            - 'NaT' will return NaT where there are nonexistent times
            - timedelta objects will shift nonexistent times by the timedelta
            - 'raise' will raise a NonExistentTimeError if there are
              nonexistent times

            .. versionadded:: 0.24.0

        errors : {'raise', 'coerce'}, default None
            - 'raise' will raise a NonExistentTimeError if a timestamp is not
              valid in the specified time zone (e.g. due to a transition from
              or to DST time). Use ``nonexistent='raise'`` instead.
            - 'coerce' will return NaT if the timestamp can not be converted
              to the specified time zone. Use ``nonexistent='NaT'`` instead.

            .. deprecated:: 0.24.0

        Returns
        -------
        result : same type as self
            Array/Index converted to the specified time zone.

        Raises
        ------
        TypeError
            If the Datetime Array/Index is tz-aware and tz is not None.

        See Also
        --------
        DatetimeIndex.tz_convert : Convert tz-aware DatetimeIndex from
            one time zone to another.

        Examples
        --------
        >>> tz_naive = pd.date_range('2018-03-01 09:00', periods=3)
        >>> tz_naive
        DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
                       '2018-03-03 09:00:00'],
                      dtype='datetime64[ns]', freq='D')

        Localize DatetimeIndex in US/Eastern time zone:

        >>> tz_aware = tz_naive.tz_localize(tz='US/Eastern')
        >>> tz_aware
        DatetimeIndex(['2018-03-01 09:00:00-05:00',
                       '2018-03-02 09:00:00-05:00',
                       '2018-03-03 09:00:00-05:00'],
                      dtype='datetime64[ns, US/Eastern]', freq='D')

        With ``tz=None``, we can remove the time zone information
        while keeping the local time (not converted to UTC):

        >>> tz_aware.tz_localize(None)
        DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
                       '2018-03-03 09:00:00'],
                      dtype='datetime64[ns]', freq='D')

        Be careful with DST changes. When there is sequential data, pandas can
        infer the DST time:

        >>> s = pd.to_datetime(pd.Series([
        ...     '2018-10-28 01:30:00',
        ...     '2018-10-28 02:00:00',
        ...     '2018-10-28 02:30:00',
        ...     '2018-10-28 02:00:00',
        ...     '2018-10-28 02:30:00',
        ...     '2018-10-28 03:00:00',
        ...     '2018-10-28 03:30:00']))
        >>> s.dt.tz_localize('CET', ambiguous='infer')
        0   2018-10-28 01:30:00+02:00
        1   2018-10-28 02:00:00+02:00
        2   2018-10-28 02:30:00+02:00
        3   2018-10-28 02:00:00+01:00
        4   2018-10-28 02:30:00+01:00
        5   2018-10-28 03:00:00+01:00
        6   2018-10-28 03:30:00+01:00
        dtype: datetime64[ns, CET]

        In some cases, inferring the DST is impossible. In such cases, you can
        pass an ndarray to the ambiguous parameter to set the DST explicitly

        >>> s = pd.to_datetime(pd.Series([
        ...     '2018-10-28 01:20:00',
        ...     '2018-10-28 02:36:00',
        ...     '2018-10-28 03:46:00']))
        >>> s.dt.tz_localize('CET', ambiguous=np.array([True, True, False]))
        0   2018-10-28 01:20:00+02:00
        1   2018-10-28 02:36:00+02:00
        2   2018-10-28 03:46:00+01:00
        dtype: datetime64[ns, CET]

        If the DST transition causes nonexistent times, you can shift these
        dates forward or backwards with a timedelta object or
        `'shift_forward'` or `'shift_backward'`.

        >>> s = pd.to_datetime(pd.Series([
        ...     '2015-03-29 02:30:00',
        ...     '2015-03-29 03:30:00']))
        >>> s.dt.tz_localize('Europe/Warsaw', nonexistent='shift_forward')
        0   2015-03-29 03:00:00+02:00
        1   2015-03-29 03:30:00+02:00
        dtype: datetime64[ns, Europe/Warsaw]

        >>> s.dt.tz_localize('Europe/Warsaw', nonexistent='shift_backward')
        0   2015-03-29 01:59:59.999999999+01:00
        1   2015-03-29 03:30:00+02:00
        dtype: datetime64[ns, Europe/Warsaw]

        >>> s.dt.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H'))
        0   2015-03-29 03:30:00+02:00
        1   2015-03-29 03:30:00+02:00
        dtype: datetime64[ns, Europe/Warsaw]
        """
        if errors is not None:
            warnings.warn("The errors argument is deprecated and will be "
                          "removed in a future release. Use "
                          "nonexistent='NaT' or nonexistent='raise' "
                          "instead.", FutureWarning)
            if errors == 'coerce':
                nonexistent = 'NaT'
            elif errors == 'raise':
                nonexistent = 'raise'
            else:
                raise ValueError("The errors argument must be either 'coerce' "
                                 "or 'raise'.")

        nonexistent_options = ('raise', 'NaT', 'shift_forward',
                               'shift_backward')
        if nonexistent not in nonexistent_options and not isinstance(
                nonexistent, timedelta):
            raise ValueError("The nonexistent argument must be one of 'raise',"
                             " 'NaT', 'shift_forward', 'shift_backward' or"
                             " a timedelta object")

        if self.tz is not None:
            if tz is None:
                new_dates = conversion.tz_convert(self.asi8, timezones.UTC,
                                                  self.tz)
            else:
                raise TypeError("Already tz-aware, use tz_convert to convert.")
        else:
            tz = timezones.maybe_get_tz(tz)
            # Convert to UTC
            new_dates = conversion.tz_localize_to_utc(
                self.asi8, tz, ambiguous=ambiguous, nonexistent=nonexistent,
            )
        new_dates = new_dates.view(_NS_DTYPE)
        dtype = tz_to_dtype(tz)
        return self._simple_new(new_dates, dtype=dtype, freq=self.freq)

    # ----------------------------------------------------------------
    # Conversion Methods - Vectorized analogues of Timestamp methods

    def to_pydatetime(self):
        """
        Return Datetime Array/Index as object ndarray of datetime.datetime
        objects.

        Returns
        -------
        datetimes : ndarray
        """
        return tslib.ints_to_pydatetime(self.asi8, tz=self.tz)

    def normalize(self):
        """
        Convert times to midnight.

        The time component of the date-time is converted to midnight i.e.
        00:00:00. This is useful in cases when the time does not matter.
        Length is unaltered. The timezones are unaffected.

        This method is available on Series with datetime values under
        the ``.dt`` accessor, and directly on Datetime Array/Index.

        Returns
        -------
        DatetimeArray, DatetimeIndex or Series
            The same type as the original data. Series will have the same
            name and index. DatetimeIndex will have the same name.

        See Also
        --------
        floor : Floor the datetimes to the specified freq.
        ceil : Ceil the datetimes to the specified freq.
        round : Round the datetimes to the specified freq.

        Examples
        --------
        >>> idx = pd.date_range(start='2014-08-01 10:00', freq='H',
        ...                     periods=3, tz='Asia/Calcutta')
        >>> idx
        DatetimeIndex(['2014-08-01 10:00:00+05:30',
                       '2014-08-01 11:00:00+05:30',
                       '2014-08-01 12:00:00+05:30'],
                      dtype='datetime64[ns, Asia/Calcutta]', freq='H')
        >>> idx.normalize()
        DatetimeIndex(['2014-08-01 00:00:00+05:30',
                       '2014-08-01 00:00:00+05:30',
                       '2014-08-01 00:00:00+05:30'],
                      dtype='datetime64[ns, Asia/Calcutta]', freq=None)
        """
        if self.tz is None or timezones.is_utc(self.tz):
            not_null = ~self.isna()
            DAY_NS = ccalendar.DAY_SECONDS * 1000000000
            new_values = self.asi8.copy()
            adjustment = (new_values[not_null] % DAY_NS)
            new_values[not_null] = new_values[not_null] - adjustment
        else:
            new_values = conversion.normalize_i8_timestamps(self.asi8, self.tz)
        return type(self)._from_sequence(new_values,
                                         freq='infer').tz_localize(self.tz)

    def to_period(self, freq=None):
        """
        Cast to PeriodArray/Index at a particular frequency.

        Converts DatetimeArray/Index to PeriodArray/Index.

        Parameters
        ----------
        freq : string or Offset, optional
            One of pandas' :ref:`offset strings <timeseries.offset_aliases>`
            or an Offset object. Will be inferred by default.

        Returns
        -------
        PeriodArray/Index

        Raises
        ------
        ValueError
            When converting a DatetimeArray/Index with non-regular values,
            so that a frequency cannot be inferred.

        See Also
        --------
        PeriodIndex: Immutable ndarray holding ordinal values.
        DatetimeIndex.to_pydatetime: Return DatetimeIndex as object.

        Examples
        --------
        >>> df = pd.DataFrame({"y": [1, 2, 3]},
        ...                   index=pd.to_datetime(["2000-03-31 00:00:00",
        ...                                         "2000-05-31 00:00:00",
        ...                                         "2000-08-31 00:00:00"]))
        >>> df.index.to_period("M")
        PeriodIndex(['2000-03', '2000-05', '2000-08'],
                    dtype='period[M]', freq='M')

        Infer the daily frequency:

        >>> idx = pd.date_range("2017-01-01", periods=2)
        >>> idx.to_period()
        PeriodIndex(['2017-01-01', '2017-01-02'],
                    dtype='period[D]', freq='D')
        """
        from pandas.core.arrays import PeriodArray

        if self.tz is not None:
            warnings.warn("Converting to PeriodArray/Index representation "
                          "will drop timezone information.", UserWarning)

        if freq is None:
            freq = self.freqstr or self.inferred_freq

            if freq is None:
                raise ValueError("You must pass a freq argument as "
                                 "current index has none.")

            freq = get_period_alias(freq)

        return PeriodArray._from_datetime64(self._data, freq, tz=self.tz)

    def to_perioddelta(self, freq):
        """
        Calculate TimedeltaArray of difference between index
        values and index converted to PeriodArray at specified
        freq. Used for vectorized offsets.

        Parameters
        ----------
        freq : Period frequency

        Returns
        -------
        TimedeltaArray/Index
        """
        # TODO: consider privatizing (discussion in GH#23113)
        from pandas.core.arrays.timedeltas import TimedeltaArray
        i8delta = self.asi8 - self.to_period(freq).to_timestamp().asi8
        m8delta = i8delta.view('m8[ns]')
        return TimedeltaArray(m8delta)
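
    # to_perioddelta sketch: the offset of each timestamp from the start of
    # its containing period, e.g. the time of day for freq='D'
    # (illustrative):
    #
    #     >>> arr = DatetimeArray._from_sequence(['2019-01-01 06:00',
    #     ...                                     '2019-01-02 18:00'])
    #     >>> arr.to_perioddelta('D')    # 6 hours, then 18 hours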

    # -----------------------------------------------------------------
    # Properties - Vectorized Timestamp Properties/Methods

    def month_name(self, locale=None):
        """
        Return the month names of the DatetimeIndex with specified locale.

        .. versionadded:: 0.23.0

        Parameters
        ----------
        locale : str, optional
            Locale determining the language in which to return the month name.
            Default is English locale.

        Returns
        -------
        Index
            Index of month names.

        Examples
        --------
        >>> idx = pd.date_range(start='2018-01', freq='M', periods=3)
        >>> idx
        DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31'],
                      dtype='datetime64[ns]', freq='M')
        >>> idx.month_name()
        Index(['January', 'February', 'March'], dtype='object')
        """
        if self.tz is not None and not timezones.is_utc(self.tz):
            values = self._local_timestamps()
        else:
            values = self.asi8

        result = fields.get_date_name_field(values, 'month_name',
                                            locale=locale)
        result = self._maybe_mask_results(result, fill_value=None)
        return result

    def day_name(self, locale=None):
        """
        Return the day names of the DatetimeIndex with specified locale.

        .. versionadded:: 0.23.0

        Parameters
        ----------
        locale : str, optional
            Locale determining the language in which to return the day name.
            Default is English locale.

        Returns
        -------
        Index
            Index of day names.

        Examples
        --------
        >>> idx = pd.date_range(start='2018-01-01', freq='D', periods=3)
        >>> idx
        DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03'],
                      dtype='datetime64[ns]', freq='D')
        >>> idx.day_name()
        Index(['Monday', 'Tuesday', 'Wednesday'], dtype='object')
        """
        if self.tz is not None and not timezones.is_utc(self.tz):
            values = self._local_timestamps()
        else:
            values = self.asi8

        result = fields.get_date_name_field(values, 'day_name',
                                            locale=locale)
        result = self._maybe_mask_results(result, fill_value=None)
        return result

    @property
    def time(self):
        """
        Returns numpy array of datetime.time. The time part of the Timestamps.
        """
        # If the Timestamps have a timezone that is not UTC,
        # convert them into their i8 representation while
        # keeping their timezone and not using UTC
        if self.tz is not None and not timezones.is_utc(self.tz):
            timestamps = self._local_timestamps()
        else:
            timestamps = self.asi8

        return tslib.ints_to_pydatetime(timestamps, box="time")

    @property
    def timetz(self):
        """
        Returns numpy array of datetime.time also containing timezone
        information. The time part of the Timestamps.
        """
        return tslib.ints_to_pydatetime(self.asi8, self.tz, box="time")

    @property
    def date(self):
        """
        Returns numpy array of python datetime.date objects (namely, the date
        part of Timestamps without timezone information).
        """
        # If the Timestamps have a timezone that is not UTC,
        # convert them into their i8 representation while
        # keeping their timezone and not using UTC
        if self.tz is not None and not timezones.is_utc(self.tz):
            timestamps = self._local_timestamps()
        else:
            timestamps = self.asi8

        return tslib.ints_to_pydatetime(timestamps, box="date")
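
    # These box-returning properties give object ndarrays of datetime.date /
    # datetime.time, computed in local wall time for tz-aware data
    # (illustrative):
    #
    #     >>> arr = DatetimeArray._from_sequence(['2019-01-01 09:30'],
    #     ...                                    tz='US/Eastern')
    #     >>> arr.date    # array([datetime.date(2019, 1, 1)], dtype=object)
    #     >>> arr.time    # array([datetime.time(9, 30)], dtype=object)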

    year = _field_accessor('year', 'Y', "The year of the datetime.")
    month = _field_accessor('month', 'M',
                            "The month as January=1, December=12.")
    day = _field_accessor('day', 'D', "The days of the datetime.")
    hour = _field_accessor('hour', 'h', "The hours of the datetime.")
    minute = _field_accessor('minute', 'm', "The minutes of the datetime.")
    second = _field_accessor('second', 's', "The seconds of the datetime.")
    microsecond = _field_accessor('microsecond', 'us',
                                  "The microseconds of the datetime.")
    nanosecond = _field_accessor('nanosecond', 'ns',
                                 "The nanoseconds of the datetime.")
    weekofyear = _field_accessor('weekofyear', 'woy',
                                 "The week ordinal of the year.")
    week = weekofyear
    _dayofweek_doc = """
    The day of the week with Monday=0, Sunday=6.

    Return the day of the week. It is assumed the week starts on
    Monday, which is denoted by 0 and ends on Sunday which is denoted
    by 6. This method is available on both Series with datetime
    values (using the `dt` accessor) and DatetimeIndex.

    Returns
    -------
    Series or Index
        Containing integers indicating the day number.

    See Also
    --------
    Series.dt.dayofweek : Alias.
    Series.dt.weekday : Alias.
    Series.dt.day_name : Returns the name of the day of the week.

    Examples
    --------
    >>> s = pd.date_range('2016-12-31', '2017-01-08', freq='D').to_series()
    >>> s.dt.dayofweek
    2016-12-31    5
    2017-01-01    6
    2017-01-02    0
    2017-01-03    1
    2017-01-04    2
    2017-01-05    3
    2017-01-06    4
    2017-01-07    5
    2017-01-08    6
    Freq: D, dtype: int64
    """
    dayofweek = _field_accessor('dayofweek', 'dow', _dayofweek_doc)
    weekday = dayofweek

    weekday_name = _field_accessor(
        'weekday_name',
        'weekday_name',
        "The name of day in a week (ex: Friday)\n\n.. deprecated:: 0.23.0")

    dayofyear = _field_accessor('dayofyear', 'doy',
                                "The ordinal day of the year.")
    quarter = _field_accessor('quarter', 'q', "The quarter of the date.")
    days_in_month = _field_accessor(
        'days_in_month',
        'dim',
        "The number of days in the month.")
    daysinmonth = days_in_month
  1149. _is_month_doc = """
  1150. Indicates whether the date is the {first_or_last} day of the month.
  1151. Returns
  1152. -------
  1153. Series or array
  1154. For Series, returns a Series with boolean values.
  1155. For DatetimeIndex, returns a boolean array.
  1156. See Also
  1157. --------
  1158. is_month_start : Return a boolean indicating whether the date
  1159. is the first day of the month.
  1160. is_month_end : Return a boolean indicating whether the date
  1161. is the last day of the month.
  1162. Examples
  1163. --------
  1164. This method is available on Series with datetime values under
  1165. the ``.dt`` accessor, and directly on DatetimeIndex.
  1166. >>> s = pd.Series(pd.date_range("2018-02-27", periods=3))
  1167. >>> s
  1168. 0 2018-02-27
  1169. 1 2018-02-28
  1170. 2 2018-03-01
  1171. dtype: datetime64[ns]
  1172. >>> s.dt.is_month_start
  1173. 0 False
  1174. 1 False
  1175. 2 True
  1176. dtype: bool
  1177. >>> s.dt.is_month_end
  1178. 0 False
  1179. 1 True
  1180. 2 False
  1181. dtype: bool
  1182. >>> idx = pd.date_range("2018-02-27", periods=3)
  1183. >>> idx.is_month_start
  1184. array([False, False, True])
  1185. >>> idx.is_month_end
  1186. array([False, True, False])
  1187. """
  1188. is_month_start = _field_accessor(
  1189. 'is_month_start',
  1190. 'is_month_start',
  1191. _is_month_doc.format(first_or_last='first'))
  1192. is_month_end = _field_accessor(
  1193. 'is_month_end',
  1194. 'is_month_end',
  1195. _is_month_doc.format(first_or_last='last'))
    is_quarter_start = _field_accessor(
        'is_quarter_start',
        'is_quarter_start',
        """
        Indicator for whether the date is the first day of a quarter.

        Returns
        -------
        is_quarter_start : Series or DatetimeIndex
            The same type as the original data with boolean values. Series will
            have the same name and index. DatetimeIndex will have the same
            name.

        See Also
        --------
        quarter : Return the quarter of the date.
        is_quarter_end : Similar property for indicating the quarter end.

        Examples
        --------
        This method is available on Series with datetime values under
        the ``.dt`` accessor, and directly on DatetimeIndex.

        >>> df = pd.DataFrame({'dates': pd.date_range("2017-03-30",
        ...                    periods=4)})
        >>> df.assign(quarter=df.dates.dt.quarter,
        ...           is_quarter_start=df.dates.dt.is_quarter_start)
               dates  quarter  is_quarter_start
        0 2017-03-30        1             False
        1 2017-03-31        1             False
        2 2017-04-01        2              True
        3 2017-04-02        2             False

        >>> idx = pd.date_range('2017-03-30', periods=4)
        >>> idx
        DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
                      dtype='datetime64[ns]', freq='D')
        >>> idx.is_quarter_start
        array([False, False, True, False])
        """)
    is_quarter_end = _field_accessor(
        'is_quarter_end',
        'is_quarter_end',
        """
        Indicator for whether the date is the last day of a quarter.

        Returns
        -------
        is_quarter_end : Series or DatetimeIndex
            The same type as the original data with boolean values. Series will
            have the same name and index. DatetimeIndex will have the same
            name.

        See Also
        --------
        quarter : Return the quarter of the date.
        is_quarter_start : Similar property indicating the quarter start.

        Examples
        --------
        This method is available on Series with datetime values under
        the ``.dt`` accessor, and directly on DatetimeIndex.

        >>> df = pd.DataFrame({'dates': pd.date_range("2017-03-30",
        ...                    periods=4)})
        >>> df.assign(quarter=df.dates.dt.quarter,
        ...           is_quarter_end=df.dates.dt.is_quarter_end)
               dates  quarter  is_quarter_end
        0 2017-03-30        1           False
        1 2017-03-31        1            True
        2 2017-04-01        2           False
        3 2017-04-02        2           False

        >>> idx = pd.date_range('2017-03-30', periods=4)
        >>> idx
        DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
                      dtype='datetime64[ns]', freq='D')
        >>> idx.is_quarter_end
        array([False, True, False, False])
        """)
    is_year_start = _field_accessor(
        'is_year_start',
        'is_year_start',
        """
        Indicate whether the date is the first day of a year.

        Returns
        -------
        Series or DatetimeIndex
            The same type as the original data with boolean values. Series will
            have the same name and index. DatetimeIndex will have the same
            name.

        See Also
        --------
        is_year_end : Similar property indicating the last day of the year.

        Examples
        --------
        This method is available on Series with datetime values under
        the ``.dt`` accessor, and directly on DatetimeIndex.

        >>> dates = pd.Series(pd.date_range("2017-12-30", periods=3))
        >>> dates
        0   2017-12-30
        1   2017-12-31
        2   2018-01-01
        dtype: datetime64[ns]

        >>> dates.dt.is_year_start
        0    False
        1    False
        2     True
        dtype: bool

        >>> idx = pd.date_range("2017-12-30", periods=3)
        >>> idx
        DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
                      dtype='datetime64[ns]', freq='D')

        >>> idx.is_year_start
        array([False, False, True])
        """)
    is_year_end = _field_accessor(
        'is_year_end',
        'is_year_end',
        """
        Indicate whether the date is the last day of the year.

        Returns
        -------
        Series or DatetimeIndex
            The same type as the original data with boolean values. Series will
            have the same name and index. DatetimeIndex will have the same
            name.

        See Also
        --------
        is_year_start : Similar property indicating the start of the year.

        Examples
        --------
        This method is available on Series with datetime values under
        the ``.dt`` accessor, and directly on DatetimeIndex.

        >>> dates = pd.Series(pd.date_range("2017-12-30", periods=3))
        >>> dates
        0   2017-12-30
        1   2017-12-31
        2   2018-01-01
        dtype: datetime64[ns]

        >>> dates.dt.is_year_end
        0    False
        1     True
        2    False
        dtype: bool

        >>> idx = pd.date_range("2017-12-30", periods=3)
        >>> idx
        DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
                      dtype='datetime64[ns]', freq='D')

        >>> idx.is_year_end
        array([False, True, False])
        """)
    is_leap_year = _field_accessor(
        'is_leap_year',
        'is_leap_year',
        """
        Boolean indicator if the date belongs to a leap year.

        A leap year is a year that has 366 days (instead of 365), including
        the 29th of February as an intercalary day.
        Leap years are years that are multiples of four, with the exception
        of years divisible by 100 but not by 400.

        Returns
        -------
        Series or ndarray
            Booleans indicating if dates belong to a leap year.

        Examples
        --------
        This method is available on Series with datetime values under
        the ``.dt`` accessor, and directly on DatetimeIndex.

        >>> idx = pd.date_range("2012-01-01", "2015-01-01", freq="Y")
        >>> idx
        DatetimeIndex(['2012-12-31', '2013-12-31', '2014-12-31'],
                      dtype='datetime64[ns]', freq='A-DEC')
        >>> idx.is_leap_year
        array([ True, False, False], dtype=bool)

        >>> dates_series = pd.Series(idx)
        >>> dates_series
        0   2012-12-31
        1   2013-12-31
        2   2014-12-31
        dtype: datetime64[ns]
        >>> dates_series.dt.is_leap_year
        0     True
        1    False
        2    False
        dtype: bool
        """)
    def to_julian_date(self):
        """
        Convert Datetime Array to float64 ndarray of Julian Dates.
        Julian date 0 is noon on January 1, 4713 BC.
        http://en.wikipedia.org/wiki/Julian_day
        """

        # http://mysite.verizon.net/aesir_research/date/jdalg2.htm
        year = np.asarray(self.year)
        month = np.asarray(self.month)
        day = np.asarray(self.day)
        testarr = month < 3
        year[testarr] -= 1
        month[testarr] += 12
        return (day +
                np.fix((153 * month - 457) / 5) +
                365 * year +
                np.floor(year / 4) -
                np.floor(year / 100) +
                np.floor(year / 400) +
                1721118.5 +
                (self.hour +
                 self.minute / 60.0 +
                 self.second / 3600.0 +
                 self.microsecond / 3600.0 / 1e+6 +
                 self.nanosecond / 3600.0 / 1e+9
                 ) / 24.0)
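
    # Illustrative sketch (not part of the implementation above; assumes
    # pandas is importable as ``pd``): the J2000 reference epoch, noon on
    # 2000-01-01, corresponds to Julian Date 2451545.0, which gives a quick
    # sanity check of the formula, roughly:
    #
    # >>> idx = pd.DatetimeIndex(['2000-01-01 12:00', '2000-01-02 12:00'])
    # >>> idx.to_julian_date()[0]
    # 2451545.0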


DatetimeArray._add_comparison_ops()


# -------------------------------------------------------------------
# Constructor Helpers

def sequence_to_dt64ns(data, dtype=None, copy=False,
                       tz=None,
                       dayfirst=False, yearfirst=False, ambiguous='raise',
                       int_as_wall_time=False):
    """
    Parameters
    ----------
    data : list-like
    dtype : dtype, str, or None, default None
    copy : bool, default False
    tz : tzinfo, str, or None, default None
    dayfirst : bool, default False
    yearfirst : bool, default False
    ambiguous : str, bool, or arraylike, default 'raise'
        See pandas._libs.tslibs.conversion.tz_localize_to_utc
    int_as_wall_time : bool, default False
        Whether to treat ints as wall time in the specified timezone, or as
        nanosecond-precision UNIX epoch (wall time in UTC).
        This is used in DatetimeIndex.__init__ to deprecate the wall-time
        behaviour.

        .. versionadded:: 0.24.0

    Returns
    -------
    result : numpy.ndarray
        The sequence converted to a numpy array with dtype ``datetime64[ns]``.
    tz : tzinfo or None
        Either the user-provided tzinfo or one inferred from the data.
    inferred_freq : Tick or None
        The inferred frequency of the sequence.

    Raises
    ------
    TypeError : if PeriodDtype data is passed
    """
    inferred_freq = None

    dtype = _validate_dt64_dtype(dtype)

    if not hasattr(data, "dtype"):
        # e.g. list, tuple
        if np.ndim(data) == 0:
            # i.e. generator
            data = list(data)
        data = np.asarray(data)
        copy = False
    elif isinstance(data, ABCSeries):
        data = data._values
    if isinstance(data, ABCPandasArray):
        data = data.to_numpy()

    if hasattr(data, "freq"):
        # i.e. DatetimeArray/Index
        inferred_freq = data.freq

    # if dtype has an embedded tz, capture it
    tz = validate_tz_from_dtype(dtype, tz)

    if isinstance(data, ABCIndexClass):
        data = data._data

    # By this point we are assured to have either a numpy array or Index
    data, copy = maybe_convert_dtype(data, copy)

    if is_object_dtype(data) or is_string_dtype(data):
        # TODO: We do not have tests specific to string-dtypes,
        #  also complex or categorical or other extension
        copy = False
        if lib.infer_dtype(data, skipna=False) == 'integer':
            data = data.astype(np.int64)
        else:
            # data comes back here as either i8 to denote UTC timestamps
            #  or M8[ns] to denote wall times
            data, inferred_tz = objects_to_datetime64ns(
                data, dayfirst=dayfirst, yearfirst=yearfirst)
            tz = maybe_infer_tz(tz, inferred_tz)
            # When a sequence of timestamp objects is passed, we always
            # want to treat the (now i8-valued) data as UTC timestamps,
            # not wall times.
            int_as_wall_time = False

    # `data` may have originally been a Categorical[datetime64[ns, tz]],
    # so we need to handle these types.
    if is_datetime64tz_dtype(data):
        # DatetimeArray -> ndarray
        tz = maybe_infer_tz(tz, data.tz)
        result = data._data

    elif is_datetime64_dtype(data):
        # tz-naive DatetimeArray or ndarray[datetime64]
        data = getattr(data, "_data", data)
        if data.dtype != _NS_DTYPE:
            data = conversion.ensure_datetime64ns(data)

        if tz is not None:
            # Convert tz-naive to UTC
            tz = timezones.maybe_get_tz(tz)
            data = conversion.tz_localize_to_utc(data.view('i8'), tz,
                                                 ambiguous=ambiguous)
            data = data.view(_NS_DTYPE)

        assert data.dtype == _NS_DTYPE, data.dtype
        result = data

    else:
        # must be integer dtype otherwise
        # assume this data are epoch timestamps
        if tz:
            tz = timezones.maybe_get_tz(tz)
        if data.dtype != _INT64_DTYPE:
            data = data.astype(np.int64, copy=False)
        if int_as_wall_time and tz is not None and not timezones.is_utc(tz):
            warnings.warn(_i8_message, FutureWarning, stacklevel=4)
            data = conversion.tz_localize_to_utc(data.view('i8'), tz,
                                                 ambiguous=ambiguous)
            data = data.view(_NS_DTYPE)
        result = data.view(_NS_DTYPE)

    if copy:
        # TODO: should this be deepcopy?
        result = result.copy()

    assert isinstance(result, np.ndarray), type(result)
    assert result.dtype == 'M8[ns]', result.dtype

    # We have to call this again after possibly inferring a tz above
    validate_tz_from_dtype(dtype, tz)

    return result, tz, inferred_freq
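
# Illustrative sketch (comments only, not executed; 'US/Eastern' is just an
# example timezone): a list of ISO-8601 strings plus an explicit ``tz`` comes
# back as UTC-based M8[ns] values, the resolved tzinfo, and no inferred
# frequency, roughly:
#
# >>> result, tz, freq = sequence_to_dt64ns(['2019-01-01', '2019-01-02'],
# ...                                       tz='US/Eastern')
# >>> result.dtype, tz is not None, freq
# (dtype('<M8[ns]'), True, None)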


def objects_to_datetime64ns(data, dayfirst, yearfirst,
                            utc=False, errors="raise",
                            require_iso8601=False, allow_object=False):
    """
    Convert data to array of timestamps.

    Parameters
    ----------
    data : np.ndarray[object]
    dayfirst : bool
    yearfirst : bool
    utc : bool, default False
        Whether to convert timezone-aware timestamps to UTC
    errors : {'raise', 'ignore', 'coerce'}
    require_iso8601 : bool, default False
        Whether to require the strings to be in ISO 8601 format.
    allow_object : bool
        Whether to return an object-dtype ndarray instead of raising if the
        data contains more than one timezone.

    Returns
    -------
    result : ndarray
        np.int64 dtype if returned values represent UTC timestamps
        np.datetime64[ns] if returned values represent wall times
        object if mixed timezones
    inferred_tz : tzinfo or None

    Raises
    ------
    ValueError : if data cannot be converted to datetimes
    """
    assert errors in ["raise", "ignore", "coerce"]

    # if str-dtype, convert
    data = np.array(data, copy=False, dtype=np.object_)

    try:
        result, tz_parsed = tslib.array_to_datetime(
            data,
            errors=errors,
            utc=utc,
            dayfirst=dayfirst,
            yearfirst=yearfirst,
            require_iso8601=require_iso8601
        )
    except ValueError as e:
        try:
            values, tz_parsed = conversion.datetime_to_datetime64(data)
            # If tzaware, these values represent unix timestamps, so we
            # return them as i8 to distinguish from wall times
            return values.view('i8'), tz_parsed
        except (ValueError, TypeError):
            raise e

    if tz_parsed is not None:
        # We can take a shortcut since the datetime64 numpy array
        # is in UTC
        # Return i8 values to denote unix timestamps
        return result.view('i8'), tz_parsed
    elif is_datetime64_dtype(result):
        # returning M8[ns] denotes wall-times; since tz is None
        # the distinction is a thin one
        return result, tz_parsed
    elif is_object_dtype(result):
        # GH#23675 when called via `pd.to_datetime`, returning an object-dtype
        # array is allowed. When called via `pd.DatetimeIndex`, we can
        # only accept datetime64 dtype, so raise TypeError if object-dtype
        # is returned, as that indicates the values can be recognized as
        # datetimes but they have conflicting timezones/awareness
        if allow_object:
            return result, tz_parsed
        raise TypeError(result)
    else:  # pragma: no cover
        # GH#23675 this TypeError should never be hit, whereas the TypeError
        # in the object-dtype branch above is reachable.
        raise TypeError(result)
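
# Illustrative sketch (comments only, not executed): tz-naive object input
# comes back as M8[ns] wall times with no tzinfo, while tz-aware input comes
# back as i8 UTC timestamps plus the parsed tzinfo, roughly:
#
# >>> import numpy as np
# >>> naive = np.array(['2019-01-01', '2019-01-02'], dtype=object)
# >>> result, tz = objects_to_datetime64ns(naive, dayfirst=False,
# ...                                      yearfirst=False)
# >>> result.dtype, tz
# (dtype('<M8[ns]'), None)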


def maybe_convert_dtype(data, copy):
    """
    Convert data based on dtype conventions, issuing deprecation warnings
    or errors where appropriate.

    Parameters
    ----------
    data : np.ndarray or pd.Index
    copy : bool

    Returns
    -------
    data : np.ndarray or pd.Index
    copy : bool

    Raises
    ------
    TypeError : if PeriodDtype data is passed
    """
    if is_float_dtype(data):
        # Note: we must cast to datetime64[ns] here in order to treat these
        # as wall-times instead of UTC timestamps.
        data = data.astype(_NS_DTYPE)
        copy = False
        # TODO: deprecate this behavior to instead treat symmetrically
        # with integer dtypes. See discussion in GH#23675

    elif is_timedelta64_dtype(data):
        warnings.warn("Passing timedelta64-dtype data is deprecated, will "
                      "raise a TypeError in a future version",
                      FutureWarning, stacklevel=5)
        data = data.view(_NS_DTYPE)

    elif is_period_dtype(data):
        # Note: without explicitly raising here, PeriodIndex
        # test_setops.test_join_does_not_recur fails
        raise TypeError("Passing PeriodDtype data is invalid. "
                        "Use `data.to_timestamp()` instead")

    elif is_categorical_dtype(data):
        # GH#18664 preserve tz in going DTI->Categorical->DTI
        # TODO: cases where we need to do another pass through this func,
        #  e.g. the categories are timedelta64s
        data = data.categories.take(data.codes, fill_value=NaT)._values
        copy = False

    elif is_extension_type(data) and not is_datetime64tz_dtype(data):
        # Includes categorical
        # TODO: We have no tests for these
        data = np.array(data, dtype=np.object_)
        copy = False

    return data, copy
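
# Illustrative sketch (comments only, not executed): float input is
# reinterpreted as wall-time datetime64[ns] (with ``copy`` reset), while
# period-dtype input raises, roughly:
#
# >>> import numpy as np
# >>> data, copy = maybe_convert_dtype(np.array([1.5e18]), copy=True)
# >>> data.dtype, copy
# (dtype('<M8[ns]'), False)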


# -------------------------------------------------------------------
# Validation and Inference

def maybe_infer_tz(tz, inferred_tz):
    """
    If a timezone is inferred from data, check that it is compatible with
    the user-provided timezone, if any.

    Parameters
    ----------
    tz : tzinfo or None
    inferred_tz : tzinfo or None

    Returns
    -------
    tz : tzinfo or None

    Raises
    ------
    TypeError : if both timezones are present but do not match
    """
    if tz is None:
        tz = inferred_tz
    elif inferred_tz is None:
        pass
    elif not timezones.tz_compare(tz, inferred_tz):
        raise TypeError('data is already tz-aware {inferred_tz}, unable to '
                        'set specified tz: {tz}'
                        .format(inferred_tz=inferred_tz, tz=tz))
    return tz
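
# Illustrative sketch (comments only, not executed): an inferred tz fills in
# a missing explicit tz, while a conflicting pair raises, roughly:
#
# >>> import pytz
# >>> maybe_infer_tz(None, pytz.UTC)
# <UTC>
# >>> maybe_infer_tz(pytz.timezone('US/Eastern'), pytz.UTC)
# Traceback (most recent call last):
#     ...
# TypeError: data is already tz-aware UTC, unable to set specified tz: US/Eastern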


def _validate_dt64_dtype(dtype):
    """
    Check that a dtype, if passed, represents either a numpy datetime64[ns]
    dtype or a pandas DatetimeTZDtype.

    Parameters
    ----------
    dtype : object

    Returns
    -------
    dtype : None, numpy.dtype, or DatetimeTZDtype

    Raises
    ------
    ValueError : invalid dtype

    Notes
    -----
    Unlike validate_tz_from_dtype, this does _not_ allow non-existent
    tz errors to go through
    """
    if dtype is not None:
        dtype = pandas_dtype(dtype)
        if is_dtype_equal(dtype, np.dtype("M8")):
            # no precision, warn
            dtype = _NS_DTYPE
            msg = textwrap.dedent("""\
            Passing in 'datetime64' dtype with no precision is deprecated
            and will raise in a future version. Please pass in
            'datetime64[ns]' instead.""")
            warnings.warn(msg, FutureWarning, stacklevel=5)

        if ((isinstance(dtype, np.dtype) and dtype != _NS_DTYPE)
                or not isinstance(dtype, (np.dtype, DatetimeTZDtype))):
            raise ValueError("Unexpected value for 'dtype': '{dtype}'. "
                             "Must be 'datetime64[ns]' or DatetimeTZDtype."
                             .format(dtype=dtype))
    return dtype
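
# Illustrative sketch (comments only, not executed): 'datetime64[ns]' and
# DatetimeTZDtype values pass through, anything else is rejected, roughly:
#
# >>> _validate_dt64_dtype('datetime64[ns]')
# dtype('<M8[ns]')
# >>> _validate_dt64_dtype('int64')
# Traceback (most recent call last):
#     ...
# ValueError: Unexpected value for 'dtype': 'int64'. ...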


def validate_tz_from_dtype(dtype, tz):
    """
    If the given dtype is a DatetimeTZDtype, extract the implied
    tzinfo object from it and check that it does not conflict with the given
    tz.

    Parameters
    ----------
    dtype : dtype, str
    tz : None, tzinfo

    Returns
    -------
    tz : consensus tzinfo

    Raises
    ------
    ValueError : on tzinfo mismatch
    """
    if dtype is not None:
        if isinstance(dtype, compat.string_types):
            try:
                dtype = DatetimeTZDtype.construct_from_string(dtype)
            except TypeError:
                # Things like `datetime64[ns]`, which is OK for the
                # constructors, but also nonsense, which should be validated
                # but not by us. We *do* allow non-existent tz errors to
                # go through
                pass

        dtz = getattr(dtype, 'tz', None)
        if dtz is not None:
            if tz is not None and not timezones.tz_compare(tz, dtz):
                raise ValueError("cannot supply both a tz and a dtype"
                                 " with a tz")
            tz = dtz

        if tz is not None and is_datetime64_dtype(dtype):
            # We also need to check for the case where the user passed a
            # tz-naive dtype (i.e. datetime64[ns])
            if tz is not None and not timezones.tz_compare(tz, dtz):
                raise ValueError("cannot supply both a tz and a "
                                 "timezone-naive dtype (i.e. datetime64[ns])")

    return tz
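
# Illustrative sketch (comments only, not executed): a tz embedded in the
# dtype is extracted, and a conflicting explicit tz raises, roughly:
#
# >>> import pytz
# >>> validate_tz_from_dtype('datetime64[ns, UTC]', None)
# <UTC>
# >>> validate_tz_from_dtype('datetime64[ns, UTC]', pytz.timezone('US/Eastern'))
# Traceback (most recent call last):
#     ...
# ValueError: cannot supply both a tz and a dtype with a tz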


def _infer_tz_from_endpoints(start, end, tz):
    """
    If a timezone is not explicitly given via `tz`, see if one can
    be inferred from the `start` and `end` endpoints. If more than one
    of these inputs provides a timezone, require that they all agree.

    Parameters
    ----------
    start : Timestamp
    end : Timestamp
    tz : tzinfo or None

    Returns
    -------
    tz : tzinfo or None

    Raises
    ------
    TypeError : if start and end timezones do not agree
    """
    try:
        inferred_tz = timezones.infer_tzinfo(start, end)
    except Exception:
        raise TypeError('Start and end cannot both be tz-aware with '
                        'different timezones')

    inferred_tz = timezones.maybe_get_tz(inferred_tz)
    tz = timezones.maybe_get_tz(tz)

    if tz is not None and inferred_tz is not None:
        if not timezones.tz_compare(inferred_tz, tz):
            raise AssertionError("Inferred time zone not equal to passed "
                                 "time zone")
    elif inferred_tz is not None:
        tz = inferred_tz

    return tz
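
# Illustrative sketch (comments only, not executed; assumes pandas is
# importable as ``pd``): the tz comes from the endpoints when not given
# explicitly, roughly:
#
# >>> start = pd.Timestamp('2019-01-01', tz='UTC')
# >>> end = pd.Timestamp('2019-01-02', tz='UTC')
# >>> _infer_tz_from_endpoints(start, end, None)
# <UTC>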


def _maybe_normalize_endpoints(start, end, normalize):
    _normalized = True

    if start is not None:
        if normalize:
            start = normalize_date(start)
            _normalized = True
        else:
            _normalized = _normalized and start.time() == _midnight

    if end is not None:
        if normalize:
            end = normalize_date(end)
            _normalized = True
        else:
            _normalized = _normalized and end.time() == _midnight

    return start, end, _normalized
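
# Illustrative sketch (comments only, not executed; assumes pandas is
# importable as ``pd``): with ``normalize=False`` the endpoints are returned
# unchanged and the flag reports whether both already fall on midnight,
# roughly:
#
# >>> start = pd.Timestamp('2019-01-01 09:30')
# >>> end = pd.Timestamp('2019-01-05 16:00')
# >>> _maybe_normalize_endpoints(start, end, normalize=False)
# (Timestamp('2019-01-01 09:30:00'), Timestamp('2019-01-05 16:00:00'), False)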


def _maybe_localize_point(ts, is_none, is_not_none, freq, tz):
    """
    Localize a start or end Timestamp to the timezone of the corresponding
    start or end Timestamp.

    Parameters
    ----------
    ts : start or end Timestamp to potentially localize
    is_none : argument that should be None
    is_not_none : argument that should not be None
    freq : Tick, DateOffset, or None
    tz : str, timezone object or None

    Returns
    -------
    ts : Timestamp
    """
    # Make sure start and end are timezone localized if:
    # 1) freq = a Timedelta-like frequency (Tick)
    # 2) freq = None i.e. generating a linspaced range
    if isinstance(freq, Tick) or freq is None:
        localize_args = {'tz': tz, 'ambiguous': False}
    else:
        localize_args = {'tz': None}
    if is_none is None and is_not_none is not None:
        ts = ts.tz_localize(**localize_args)
    return ts
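
# Illustrative sketch (comments only, not executed; assumes pandas is
# importable as ``pd``): with a Tick frequency, a lone tz-naive endpoint gets
# localized to the requested tz, roughly:
#
# >>> from pandas.tseries.offsets import Day
# >>> start = pd.Timestamp('2019-01-01')
# >>> _maybe_localize_point(start, None, pd.Timestamp('2019-01-05'),
# ...                       Day(), 'US/Eastern')
# Timestamp('2019-01-01 00:00:00-0500', tz='US/Eastern')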