diff --git a/doc/source/whatsnew/v3.0.0.rst b/doc/source/whatsnew/v3.0.0.rst index 5db05142aba98..941ea44cbd279 100644 --- a/doc/source/whatsnew/v3.0.0.rst +++ b/doc/source/whatsnew/v3.0.0.rst @@ -384,6 +384,8 @@ In cases with mixed-resolution inputs, the highest resolution is used: .. warning:: Many users will now get "M8[us]" dtype data in cases when they used to get "M8[ns]". For most use cases they should not notice a difference. One big exception is converting to integers, which will give integers 1000x smaller. +Similarly, the :class:`Timedelta` constructor and :func:`to_timedelta` with a string input now default to a microsecond unit, using the nanosecond unit only in cases that actually have nanosecond precision. + .. _whatsnew_300.api_breaking.concat_datetime_sorting: :func:`concat` no longer ignores ``sort`` when all objects have a :class:`DatetimeIndex` diff --git a/pandas/_libs/tslibs/timedeltas.pyx b/pandas/_libs/tslibs/timedeltas.pyx index 8515f3de8ca4e..06334cd6f3081 100644 --- a/pandas/_libs/tslibs/timedeltas.pyx +++ b/pandas/_libs/tslibs/timedeltas.pyx @@ -1,4 +1,5 @@ import collections +import re import warnings from pandas.util._decorators import set_module @@ -448,11 +449,19 @@ def array_to_timedelta64( ival = parse_iso_format_string(item) else: ival = parse_timedelta_string(item) + if ( + (infer_reso or creso == NPY_DATETIMEUNIT.NPY_FR_us) + and not needs_nano_unit(ival, item) + ): + item_reso = NPY_DATETIMEUNIT.NPY_FR_us + ival = ival // 1000 + else: + item_reso = NPY_FR_ns - item_reso = NPY_FR_ns - state.update_creso(item_reso) - if infer_reso: - creso = state.creso + if ival != NPY_NAT: + state.update_creso(item_reso) + if infer_reso: + creso = state.creso elif is_tick_object(item): item_reso = get_supported_reso(item._creso) @@ -722,6 +731,24 @@ cdef timedelta_from_spec(object number, object frac, object unit): return cast_from_unit(float(n), unit) +cdef bint needs_nano_unit(int64_t ival, str item): + """ + Check if a passed string `item` needs to be stored with a nanosecond unit or can + use microsecond instead. Nanoseconds are needed if: + + - the parsed value in nanoseconds has sub-microsecond content -> certainly + needs nano + - the seconds part in the string contains more than 6 decimals, i.e. has + trailing zeros beyond the microsecond part (e.g. "0.123456000 s") -> treat + as nano for consistency + - the string explicitly contains an entry for nanoseconds (e.g. "1000 ns") + """ + # TODO: more performant way of doing this check? 
+ if ival % 1000 != 0: + return True + return re.search(r"\.\d{7}", item) or "ns" in item or "nano" in item.lower() + + cpdef inline str parse_timedelta_unit(str unit): """ Parameters @@ -2121,10 +2148,17 @@ class Timedelta(_Timedelta): if (len(value) > 0 and value[0] == "P") or ( len(value) > 1 and value[:2] == "-P" ): - value = parse_iso_format_string(value) + ival = parse_iso_format_string(value) + else: + ival = parse_timedelta_string(value) + + if not needs_nano_unit(ival, value): + # If we don't specifically need nanosecond resolution, default + # to microsecond like we do for datetimes + value = np.timedelta64(ival // 1000, "us") + return cls(value) else: - value = parse_timedelta_string(value) - value = np.timedelta64(value) + value = np.timedelta64(ival, "ns") elif PyDelta_Check(value): # pytimedelta object -> microsecond resolution new_value = delta_to_nanoseconds( diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index c55a5d6dba175..d4ada8360d645 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -927,7 +927,7 @@ def inferred_freq(self) -> str | None: >>> tdelta_idx = pd.to_timedelta(["0 days", "10 days", "20 days"]) >>> tdelta_idx TimedeltaIndex(['0 days', '10 days', '20 days'], - dtype='timedelta64[ns]', freq=None) + dtype='timedelta64[us]', freq=None) >>> tdelta_idx.inferred_freq '10D' """ diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py index a8830145e709e..514ad28f698d6 100644 --- a/pandas/core/arrays/timedeltas.py +++ b/pandas/core/arrays/timedeltas.py @@ -150,7 +150,7 @@ class TimedeltaArray(dtl.TimelikeOps): >>> pd.arrays.TimedeltaArray._from_sequence(pd.TimedeltaIndex(["1h", "2h"])) ['0 days 01:00:00', '0 days 02:00:00'] - Length: 2, dtype: timedelta64[ns] + Length: 2, dtype: timedelta64[us] """ _typ = "timedeltaarray" @@ -813,7 +813,7 @@ def total_seconds(self) -> npt.NDArray[np.float64]: >>> idx = pd.to_timedelta(np.arange(5), unit="D") >>> idx TimedeltaIndex(['0 days', '1 days', '2 days', '3 days', '4 days'], - dtype='timedelta64[ns]', freq=None) + dtype='timedelta64[us]', freq=None) >>> idx.total_seconds() Index([0.0, 86400.0, 172800.0, 259200.0, 345600.0], dtype='float64') @@ -892,7 +892,7 @@ def to_pytimedelta(self) -> npt.NDArray[np.object_]: >>> tdelta_idx = pd.to_timedelta(["0 days", "10 days", "20 days"]) >>> tdelta_idx TimedeltaIndex(['0 days', '10 days', '20 days'], - dtype='timedelta64[ns]', freq=None) + dtype='timedelta64[us]', freq=None) >>> tdelta_idx.days Index([0, 10, 20], dtype='int64')""" ) diff --git a/pandas/core/dtypes/astype.py b/pandas/core/dtypes/astype.py index b21c6e3fb33fb..075ac9e0f919a 100644 --- a/pandas/core/dtypes/astype.py +++ b/pandas/core/dtypes/astype.py @@ -117,7 +117,7 @@ def _astype_nansafe( # bc we know arr.dtype == object, this is equivalent to # `np.asarray(to_timedelta(arr))`, but using a lower-level API that # does not require a circular import. 
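# Illustrative sketch (not part of the patch): the whatsnew note and the
# ``needs_nano_unit`` helper above imply the following unit inference for
# string inputs. The expected values in the comments are assumptions derived
# from the rules in this diff, not output copied from a real run.
import pandas as pd

pd.Timedelta("1 day").unit                      # "us": no nanosecond precision needed
pd.Timedelta("1 day 00:00:00.000000001").unit   # "ns": sub-microsecond component
pd.Timedelta("0.123456000 s").unit              # "ns": more than 6 decimals in the string
pd.Timedelta("1000 ns").unit                    # "ns": explicit nanosecond entry
pd.to_timedelta(["1 day", "2 days"]).dtype      # timedelta64[us]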
- tdvals = array_to_timedelta64(arr).view("m8[ns]") + tdvals = array_to_timedelta64(arr) tda = ensure_wrapped_if_datetimelike(tdvals) return tda.astype(dtype, copy=False)._ndarray diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 2b9adb6230028..6d4d9e2080c3b 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -8657,7 +8657,8 @@ def _maybe_align_series_as_frame(self, series: Series, axis: AxisInt): rvalues = series._values if not isinstance(rvalues, np.ndarray): # TODO(EA2D): no need to special-case with 2D EAs - if rvalues.dtype in ("datetime64[ns]", "timedelta64[ns]"): + if lib.is_np_dtype(rvalues.dtype, "mM"): + # i.e. DatetimeArray[tznaive] or TimedeltaArray # We can losslessly+cheaply cast to ndarray rvalues = np.asarray(rvalues) else: diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 4ea6b2906492b..bf2f5f77fdce7 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -1560,7 +1560,7 @@ def abs(self) -> Self: >>> s = pd.Series([pd.Timedelta("1 days")]) >>> s.abs() 0 1 days - dtype: timedelta64[ns] + dtype: timedelta64[us] Select rows with data closest to certain value using argsort (from `StackOverflow `__). diff --git a/pandas/core/indexes/accessors.py b/pandas/core/indexes/accessors.py index 686d00b680134..9ead85cb1b2e7 100644 --- a/pandas/core/indexes/accessors.py +++ b/pandas/core/indexes/accessors.py @@ -453,7 +453,7 @@ class TimedeltaProperties(Properties): 0 0 days 00:00:01 1 0 days 00:00:02 2 0 days 00:00:03 - dtype: timedelta64[ns] + dtype: timedelta64[us] >>> seconds_series.dt.seconds 0 1 1 2 diff --git a/pandas/core/indexes/datetimelike.py b/pandas/core/indexes/datetimelike.py index 01b1016007951..e81181e49eef7 100644 --- a/pandas/core/indexes/datetimelike.py +++ b/pandas/core/indexes/datetimelike.py @@ -691,7 +691,7 @@ def inferred_freq(self) -> str | None: >>> tdelta_idx = pd.to_timedelta(["0 days", "10 days", "20 days"]) >>> tdelta_idx TimedeltaIndex(['0 days', '10 days', '20 days'], - dtype='timedelta64[ns]', freq=None) + dtype='timedelta64[us]', freq=None) >>> tdelta_idx.inferred_freq '10D' """ diff --git a/pandas/core/indexes/timedeltas.py b/pandas/core/indexes/timedeltas.py index fa40d89ce6fe7..8cd2cda4f9035 100644 --- a/pandas/core/indexes/timedeltas.py +++ b/pandas/core/indexes/timedeltas.py @@ -122,7 +122,7 @@ class TimedeltaIndex(DatetimeTimedeltaMixin): -------- >>> pd.TimedeltaIndex(["0 days", "1 days", "2 days", "3 days", "4 days"]) TimedeltaIndex(['0 days', '1 days', '2 days', '3 days', '4 days'], - dtype='timedelta64[ns]', freq=None) + dtype='timedelta64[us]', freq=None) We can also let pandas infer the frequency when possible. 
@@ -230,18 +230,27 @@ def get_loc(self, key): return Index.get_loc(self, key) - # error: Return type "tuple[Timedelta | NaTType, None]" of "_parse_with_reso" - # incompatible with return type "tuple[datetime, Resolution]" in supertype - # "DatetimeIndexOpsMixin" - def _parse_with_reso(self, label: str) -> tuple[Timedelta | NaTType, None]: # type: ignore[override] - # the "with_reso" is a no-op for TimedeltaIndex + # error: Return type "tuple[Timedelta | NaTType, Resolution]" of + # "_parse_with_reso" incompatible with return type + # "tuple[datetime, Resolution]" in supertype + # "pandas.core.indexes.datetimelike.DatetimeIndexOpsMixin" + def _parse_with_reso(self, label: str) -> tuple[Timedelta | NaTType, Resolution]: # type: ignore[override] parsed = Timedelta(label) - return parsed, None + if isinstance(parsed, Timedelta): + reso = Resolution.get_reso_from_freqstr(parsed.unit) + else: + # i.e. pd.NaT + reso = Resolution.get_reso_from_freqstr("s") + return parsed, reso - def _parsed_string_to_bounds(self, reso, parsed: Timedelta): + def _parsed_string_to_bounds(self, reso: Resolution, parsed: Timedelta): # reso is unused, included to match signature of DTI/PI lbound = parsed.round(parsed.resolution_string) - rbound = lbound + to_offset(parsed.resolution_string) - Timedelta(1, "ns") + rbound = ( + lbound + + to_offset(parsed.resolution_string) + - Timedelta(1, unit=self.unit).as_unit(self.unit) + ) return lbound, rbound # ------------------------------------------------------------------- @@ -314,14 +323,14 @@ def timedelta_range( -------- >>> pd.timedelta_range(start="1 day", periods=4) TimedeltaIndex(['1 days', '2 days', '3 days', '4 days'], - dtype='timedelta64[ns]', freq='D') + dtype='timedelta64[us]', freq='D') The ``closed`` parameter specifies which endpoint is included. The default behavior is to include both endpoints. >>> pd.timedelta_range(start="1 day", periods=4, closed="right") TimedeltaIndex(['2 days', '3 days', '4 days'], - dtype='timedelta64[ns]', freq='D') + dtype='timedelta64[us]', freq='D') The ``freq`` parameter specifies the frequency of the TimedeltaIndex. Only fixed frequencies can be passed, non-fixed frequencies such as @@ -330,7 +339,7 @@ def timedelta_range( >>> pd.timedelta_range(start="1 day", end="2 days", freq="6h") TimedeltaIndex(['1 days 00:00:00', '1 days 06:00:00', '1 days 12:00:00', '1 days 18:00:00', '2 days 00:00:00'], - dtype='timedelta64[ns]', freq='6h') + dtype='timedelta64[us]', freq='6h') Specify ``start``, ``end``, and ``periods``; the frequency is generated automatically (linearly spaced). 
@@ -338,7 +347,7 @@ def timedelta_range( >>> pd.timedelta_range(start="1 day", end="5 days", periods=4) TimedeltaIndex(['1 days 00:00:00', '2 days 08:00:00', '3 days 16:00:00', '5 days 00:00:00'], - dtype='timedelta64[ns]', freq=None) + dtype='timedelta64[us]', freq=None) **Specify a unit** diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 772f425beed96..02845f9b357f6 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -944,8 +944,9 @@ def nanstd( >>> nanops.nanstd(s.values) 1.0 """ - if values.dtype == "M8[ns]": - values = values.view("m8[ns]") + if values.dtype.kind == "M": + unit = np.datetime_data(values.dtype)[0] + values = values.view(f"m8[{unit}]") orig_dtype = values.dtype values, mask = _get_values(values, skipna, mask=mask) diff --git a/pandas/core/ops/array_ops.py b/pandas/core/ops/array_ops.py index be249215ea4dc..7b21772b443f6 100644 --- a/pandas/core/ops/array_ops.py +++ b/pandas/core/ops/array_ops.py @@ -545,7 +545,9 @@ def maybe_prepare_scalar_for_op(obj, shape: Shape): # Avoid possible ambiguities with pd.NaT # GH 52295 if is_unitless(obj.dtype): - obj = obj.astype("datetime64[ns]") + # Use second resolution to ensure that the result of e.g. + # `left - np.datetime64("NaT")` retains the unit of left.unit + obj = obj.astype("datetime64[s]") elif not is_supported_dtype(obj.dtype): new_dtype = get_supported_dtype(obj.dtype) obj = obj.astype(new_dtype) @@ -563,7 +565,9 @@ def maybe_prepare_scalar_for_op(obj, shape: Shape): # we broadcast and wrap in a TimedeltaArray # GH 52295 if is_unitless(obj.dtype): - obj = obj.astype("timedelta64[ns]") + # Use second resolution to ensure that the result of e.g. + # `left + np.timedelta64("NaT")` retains the unit of left.unit + obj = obj.astype("timedelta64[s]") elif not is_supported_dtype(obj.dtype): new_dtype = get_supported_dtype(obj.dtype) obj = obj.astype(new_dtype) diff --git a/pandas/plotting/_matplotlib/converter.py b/pandas/plotting/_matplotlib/converter.py index 6ff7f4eda62eb..5d06cbb8e8d2e 100644 --- a/pandas/plotting/_matplotlib/converter.py +++ b/pandas/plotting/_matplotlib/converter.py @@ -61,6 +61,7 @@ from matplotlib.axis import Axis from pandas._libs.tslibs.offsets import BaseOffset + from pandas._typing import TimeUnit _mpl_units: dict = {} # Cache for units overwritten by us @@ -1099,18 +1100,22 @@ class TimeSeries_TimedeltaFormatter(mpl.ticker.Formatter): # pyright: ignore[re Formats the ticks along an axis controlled by a :class:`TimedeltaIndex`. 
""" + def __init__(self, unit: TimeUnit = "ns"): + self.unit = unit + super().__init__() + axis: Axis @staticmethod - def format_timedelta_ticks(x, pos, n_decimals: int) -> str: + def format_timedelta_ticks(x, pos, n_decimals: int, exp: int) -> str: """ Convert seconds to 'D days HH:MM:SS.F' """ - s, ns = divmod(x, 10**9) # TODO(non-nano): this looks like it assumes ns + s, ns = divmod(x, 10**exp) m, s = divmod(s, 60) h, m = divmod(m, 60) d, h = divmod(h, 24) - decimals = int(ns * 10 ** (n_decimals - 9)) + decimals = int(ns * 10 ** (n_decimals - exp)) s = f"{int(h):02d}:{int(m):02d}:{int(s):02d}" if n_decimals > 0: s += f".{decimals:0{n_decimals}d}" @@ -1119,6 +1124,7 @@ def format_timedelta_ticks(x, pos, n_decimals: int) -> str: return s def __call__(self, x, pos: int | None = 0) -> str: + exp = {"ns": 9, "us": 6, "ms": 3, "s": 0}[self.unit] (vmin, vmax) = tuple(self.axis.get_view_interval()) - n_decimals = min(int(np.ceil(np.log10(100 * 10**9 / abs(vmax - vmin)))), 9) - return self.format_timedelta_ticks(x, pos, n_decimals) + n_decimals = min(int(np.ceil(np.log10(100 * 10**exp / abs(vmax - vmin)))), exp) + return self.format_timedelta_ticks(x, pos, n_decimals, exp) diff --git a/pandas/plotting/_matplotlib/timeseries.py b/pandas/plotting/_matplotlib/timeseries.py index e489b6a5e8f30..5023867445adb 100644 --- a/pandas/plotting/_matplotlib/timeseries.py +++ b/pandas/plotting/_matplotlib/timeseries.py @@ -341,7 +341,7 @@ def format_dateaxis( subplot.format_coord = functools.partial(_format_coord, freq) elif isinstance(index, ABCTimedeltaIndex): - subplot.xaxis.set_major_formatter(TimeSeries_TimedeltaFormatter()) + subplot.xaxis.set_major_formatter(TimeSeries_TimedeltaFormatter(index.unit)) else: raise TypeError("index type not supported") diff --git a/pandas/tests/apply/test_frame_apply.py b/pandas/tests/apply/test_frame_apply.py index e14c69ce694be..8d2f57c7cc926 100644 --- a/pandas/tests/apply/test_frame_apply.py +++ b/pandas/tests/apply/test_frame_apply.py @@ -659,7 +659,7 @@ def test_apply_non_numpy_dtype(): result = df.apply(lambda x: x + pd.Timedelta("1day")) expected = DataFrame( - {"dt": date_range("2015-01-02", periods=3, tz="Europe/Brussels", unit="ns")} + {"dt": date_range("2015-01-02", periods=3, tz="Europe/Brussels")} ) tm.assert_frame_equal(result, expected) diff --git a/pandas/tests/apply/test_series_apply.py b/pandas/tests/apply/test_series_apply.py index 896c5c5fca9f7..5669a73f521e6 100644 --- a/pandas/tests/apply/test_series_apply.py +++ b/pandas/tests/apply/test_series_apply.py @@ -158,7 +158,7 @@ def test_apply_box_td64(): # timedelta vals = [pd.Timedelta("1 days"), pd.Timedelta("2 days")] ser = Series(vals) - assert ser.dtype == "timedelta64[ns]" + assert ser.dtype == "timedelta64[us]" res = ser.apply(lambda x: f"{type(x).__name__}_{x.days}", by_row="compat") exp = Series(["Timedelta_1", "Timedelta_2"]) tm.assert_series_equal(res, exp) diff --git a/pandas/tests/arithmetic/test_datetime64.py b/pandas/tests/arithmetic/test_datetime64.py index 7fd5db3a6e83e..05d0a9c0626af 100644 --- a/pandas/tests/arithmetic/test_datetime64.py +++ b/pandas/tests/arithmetic/test_datetime64.py @@ -870,7 +870,7 @@ def test_dt64arr_sub_timedeltalike_scalar( tm.assert_equal(rng, expected) def test_dt64_array_sub_dt_with_different_timezone(self, box_with_array): - t1 = date_range("20130101", periods=3, unit="ns").tz_localize("US/Eastern") + t1 = date_range("20130101", periods=3).tz_localize("US/Eastern") t1 = tm.box_expected(t1, box_with_array) t2 = Timestamp("20130101").tz_localize("CET") tnaive 
= Timestamp(20130101) @@ -897,11 +897,11 @@ def test_dt64_array_sub_dt_with_different_timezone(self, box_with_array): tnaive - t1 def test_dt64_array_sub_dt64_array_with_different_timezone(self, box_with_array): - t1 = date_range("20130101", periods=3, unit="ns").tz_localize("US/Eastern") + t1 = date_range("20130101", periods=3).tz_localize("US/Eastern") t1 = tm.box_expected(t1, box_with_array) - t2 = date_range("20130101", periods=3, unit="ns").tz_localize("CET") + t2 = date_range("20130101", periods=3).tz_localize("CET") t2 = tm.box_expected(t2, box_with_array) - tnaive = date_range("20130101", periods=3, unit="ns") + tnaive = date_range("20130101", periods=3) result = t1 - t2 expected = TimedeltaIndex( @@ -947,11 +947,11 @@ def test_dt64arr_add_sub_td64_nat(self, box_with_array, tz_naive_fixture): def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array): tz = tz_naive_fixture - dti = date_range("2016-01-01", periods=3, tz=tz, unit="ns") + dti = date_range("2016-01-01", periods=3, tz=tz) tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"]) tdarr = tdi.values - expected = date_range("2015-12-31", "2016-01-02", periods=3, tz=tz, unit="ns") + expected = date_range("2015-12-31", "2016-01-02", periods=3, tz=tz) dtarr = tm.box_expected(dti, box_with_array) expected = tm.box_expected(expected, box_with_array) @@ -961,7 +961,7 @@ def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array): result = tdarr + dtarr tm.assert_equal(result, expected) - expected = date_range("2016-01-02", "2016-01-04", periods=3, tz=tz, unit="ns") + expected = date_range("2016-01-02", "2016-01-04", periods=3, tz=tz) expected = tm.box_expected(expected, box_with_array) result = dtarr - tdarr @@ -970,6 +970,7 @@ def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array): [ "cannot subtract DatetimeArray from ndarray", "cannot subtract a datelike from a TimedeltaArray", + "cannot subtract DatetimeArray from Timedelta", ] ) with pytest.raises(TypeError, match=msg): @@ -991,7 +992,7 @@ def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array): ) def test_dt64arr_sub_dtscalar(self, box_with_array, ts): # GH#8554, GH#22163 DataFrame op should _not_ return dt64 dtype - idx = date_range("2013-01-01", periods=3, unit="ns")._with_freq(None) + idx = date_range("2013-01-01", periods=3)._with_freq(None) idx = tm.box_expected(idx, box_with_array) expected = TimedeltaIndex(["0 Days", "1 Day", "2 Days"]) @@ -1892,7 +1893,7 @@ def test_sub_single_tz(self, unit): def test_dt64tz_series_sub_dtitz(self): # GH#19071 subtracting tzaware DatetimeIndex from tzaware Series # (with same tz) raises, fixed by #19024 - dti = date_range("1999-09-30", periods=10, tz="US/Pacific", unit="ns") + dti = date_range("1999-09-30", periods=10, tz="US/Pacific") ser = Series(dti) expected = Series(TimedeltaIndex(["0days"] * 10)) @@ -2042,7 +2043,7 @@ def test_dti_add_tdi(self, tz_naive_fixture): tz = tz_naive_fixture dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10) tdi = pd.timedelta_range("0 days", periods=10) - expected = date_range("2017-01-01", periods=10, tz=tz, unit="ns") + expected = date_range("2017-01-01", periods=10, tz=tz) expected = expected._with_freq(None) # add with TimedeltaIndex @@ -2064,7 +2065,7 @@ def test_dti_iadd_tdi(self, tz_naive_fixture): tz = tz_naive_fixture dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10) tdi = pd.timedelta_range("0 days", periods=10) - expected = date_range("2017-01-01", periods=10, tz=tz, unit="ns") + expected = 
date_range("2017-01-01", periods=10, tz=tz) expected = expected._with_freq(None) # iadd with TimedeltaIndex @@ -2090,7 +2091,7 @@ def test_dti_sub_tdi(self, tz_naive_fixture): tz = tz_naive_fixture dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10) tdi = pd.timedelta_range("0 days", periods=10) - expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D", unit="ns") + expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D") expected = expected._with_freq(None) # sub with TimedeltaIndex @@ -2479,11 +2480,11 @@ def test_non_nano_dt64_addsub_np_nat_scalars_unitless(): # TODO: Can we default to the ser unit? ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]") result = ser - np.datetime64("nat") - expected = Series([NaT] * 3, dtype="timedelta64[ns]") + expected = Series([NaT] * 3, dtype="timedelta64[ms]") tm.assert_series_equal(result, expected) result = ser + np.timedelta64("nat") - expected = Series([NaT] * 3, dtype="datetime64[ns]") + expected = Series([NaT] * 3, dtype="datetime64[ms]") tm.assert_series_equal(result, expected) diff --git a/pandas/tests/arithmetic/test_numeric.py b/pandas/tests/arithmetic/test_numeric.py index d9d9343a9b56e..d1db440a8f609 100644 --- a/pandas/tests/arithmetic/test_numeric.py +++ b/pandas/tests/arithmetic/test_numeric.py @@ -295,7 +295,9 @@ def test_numeric_arr_rdiv_tdscalar(self, three_days, numeric_idx, box_with_array # i.e. resolution is lower -> use lowest supported resolution dtype = np.dtype("m8[s]") expected = expected.astype(dtype) - elif type(three_days) is timedelta: + elif type(three_days) is timedelta or ( + isinstance(three_days, Timedelta) and three_days.unit == "us" + ): expected = expected.astype("m8[us]") elif isinstance( three_days, diff --git a/pandas/tests/arithmetic/test_period.py b/pandas/tests/arithmetic/test_period.py index 0514bc203bf66..7756719654657 100644 --- a/pandas/tests/arithmetic/test_period.py +++ b/pandas/tests/arithmetic/test_period.py @@ -1361,12 +1361,8 @@ def test_period_add_timestamp_raises(self, box_with_array): arr + ts with pytest.raises(TypeError, match=msg): ts + arr - if box_with_array is pd.DataFrame: - # TODO: before implementing resolution-inference we got the same - # message with DataFrame and non-DataFrame. Why did that change? 
- msg = "cannot add PeriodArray and Timestamp" - else: - msg = "cannot add PeriodArray and DatetimeArray" + + msg = "cannot add PeriodArray and DatetimeArray" with pytest.raises(TypeError, match=msg): arr + Series([ts]) with pytest.raises(TypeError, match=msg): diff --git a/pandas/tests/arithmetic/test_timedelta64.py b/pandas/tests/arithmetic/test_timedelta64.py index 562c7c674da61..491a509e2af31 100644 --- a/pandas/tests/arithmetic/test_timedelta64.py +++ b/pandas/tests/arithmetic/test_timedelta64.py @@ -284,7 +284,7 @@ def test_td64_op_with_list(self, box_with_array): right = [Timestamp("2016-01-01"), Timestamp("2016-02-01")] result = left + right - expected = DatetimeIndex(["2016-01-03", "2016-02-05"], dtype="M8[ns]") + expected = DatetimeIndex(["2016-01-03", "2016-02-05"], dtype="M8[us]") expected = tm.box_expected(expected, box) tm.assert_equal(result, expected) @@ -325,7 +325,7 @@ def test_ufunc_coercions(self): def test_subtraction_ops(self): # with datetimes/timedelta and tdi/dti tdi = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo") - dti = pd.date_range("20130101", periods=3, name="bar", unit="ns") + dti = pd.date_range("20130101", periods=3, name="bar") td = Timedelta("1 days") dt = Timestamp("20130101") @@ -361,25 +361,23 @@ def test_subtraction_ops(self): result = dti - td expected = DatetimeIndex( - ["20121231", "20130101", "20130102"], dtype="M8[ns]", freq="D", name="bar" + ["20121231", "20130101", "20130102"], dtype="M8[us]", freq="D", name="bar" ) tm.assert_index_equal(result, expected) result = dt - tdi expected = DatetimeIndex( - ["20121231", NaT, "20121230"], dtype="M8[ns]", name="foo" + ["20121231", NaT, "20121230"], dtype="M8[us]", name="foo" ) tm.assert_index_equal(result, expected) def test_subtraction_ops_with_tz(self, box_with_array): # check that dt/dti subtraction ops with tz are validated - dti = pd.date_range("20130101", periods=3, unit="ns") + dti = pd.date_range("20130101", periods=3) dti = tm.box_expected(dti, box_with_array) ts = Timestamp("20130101") dt = ts.to_pydatetime() - dti_tz = pd.date_range("20130101", periods=3, unit="ns").tz_localize( - "US/Eastern" - ) + dti_tz = pd.date_range("20130101", periods=3).tz_localize("US/Eastern") dti_tz = tm.box_expected(dti_tz, box_with_array) ts_tz = Timestamp("20130101").tz_localize("US/Eastern") ts_tz2 = Timestamp("20130101").tz_localize("CET") @@ -453,9 +451,7 @@ def _check(result, expected): _check(result, expected) result = dti_tz - td - expected = DatetimeIndex( - ["20121231", "20130101", "20130102"], tz="US/Eastern" - ).as_unit("ns") + expected = DatetimeIndex(["20121231", "20130101", "20130102"], tz="US/Eastern") expected = tm.box_expected(expected, box_with_array) tm.assert_equal(result, expected) @@ -473,7 +469,7 @@ def test_dti_tdi_numeric_ops(self): tm.assert_index_equal(result, expected) result = dti - tdi # name will be reset - expected = DatetimeIndex(["20121231", NaT, "20130101"], dtype="M8[ns]") + expected = DatetimeIndex(["20121231", NaT, "20130101"], dtype="M8[us]") tm.assert_index_equal(result, expected) def test_addition_ops(self): @@ -485,13 +481,13 @@ def test_addition_ops(self): result = tdi + dt expected = DatetimeIndex( - ["20130102", NaT, "20130103"], dtype="M8[ns]", name="foo" + ["20130102", NaT, "20130103"], dtype="M8[us]", name="foo" ) tm.assert_index_equal(result, expected) result = dt + tdi expected = DatetimeIndex( - ["20130102", NaT, "20130103"], dtype="M8[ns]", name="foo" + ["20130102", NaT, "20130103"], dtype="M8[us]", name="foo" ) tm.assert_index_equal(result, expected) @@ 
-520,11 +516,11 @@ def test_addition_ops(self): # pytest.raises(TypeError, lambda : Index([1,2,3]) + tdi) result = tdi + dti # name will be reset - expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[ns]") + expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[us]") tm.assert_index_equal(result, expected) result = dti + tdi # name will be reset - expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[ns]") + expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[us]") tm.assert_index_equal(result, expected) result = dt + td @@ -620,6 +616,15 @@ def test_tdi_iadd_timedeltalike(self, two_hours, box_with_array): # only test adding/sub offsets as + is now numeric rng = timedelta_range("1 days", "10 days") expected = timedelta_range("1 days 02:00:00", "10 days 02:00:00", freq="D") + if ( + isinstance(two_hours, Timedelta) + and two_hours.unit == "ns" + and box_with_array is not pd.array + ): + # The EA op has to be _actually_ inplace so does not cast to a + # new dtype. For the others, the op can assign a new array + # and get the dtype that normally results from `rng + two_hours` + expected = expected.as_unit("ns") rng = tm.box_expected(rng, box_with_array) expected = tm.box_expected(expected, box_with_array) @@ -635,6 +640,15 @@ def test_tdi_isub_timedeltalike(self, two_hours, box_with_array): # only test adding/sub offsets as - is now numeric rng = timedelta_range("1 days", "10 days") expected = timedelta_range("0 days 22:00:00", "9 days 22:00:00") + if ( + isinstance(two_hours, Timedelta) + and two_hours.unit == "ns" + and box_with_array is not pd.array + ): + # The EA op has to be _actually_ inplace so does not cast to a + # new dtype. For the others, the op can assign a new array + # and get the dtype that normally results from `rng - two_hours` + expected = expected.as_unit("ns") rng = tm.box_expected(rng, box_with_array) expected = tm.box_expected(expected, box_with_array) @@ -736,7 +750,7 @@ def test_tdi_add_overflow(self): ) # These should not overflow! 
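# Illustrative sketch (not part of the patch): the comments added to
# test_tdi_iadd_timedeltalike / test_tdi_isub_timedeltalike above explain why
# the expected unit depends on the box. Assumed behavior, with ``two_hours``
# taken to be a nanosecond-unit Timedelta as in the guarded branch.
import pandas as pd

rng = pd.timedelta_range("1 days", "10 days")       # timedelta64[us] under this change
two_hours_ns = pd.Timedelta(hours=2).as_unit("ns")

arr = pd.array(rng)                                 # TimedeltaArray with unit "us"
arr += two_hours_ns                                 # genuinely inplace -> unit stays "us"

idx = rng + two_hours_ns                            # new object -> upcast to "ns"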
- exp = TimedeltaIndex([NaT], dtype="m8[ns]") + exp = TimedeltaIndex([NaT], dtype="m8[us]") result = pd.to_timedelta([NaT]) - Timedelta("1 days") tm.assert_index_equal(result, exp) @@ -768,12 +782,12 @@ def test_timedelta_ops_with_missing_values(self): s1 = pd.to_timedelta(Series(["00:00:01"])) s2 = pd.to_timedelta(Series(["00:00:02"])) - sn = pd.to_timedelta(Series([NaT], dtype="m8[ns]")) + sn = pd.to_timedelta(Series([NaT], dtype="m8[us]")) df1 = DataFrame(["00:00:01"]).apply(pd.to_timedelta) df2 = DataFrame(["00:00:02"]).apply(pd.to_timedelta) - dfn = DataFrame([NaT._value]).apply(pd.to_timedelta) + dfn = DataFrame([NaT._value]).apply(pd.to_timedelta).astype("m8[us]") scalar1 = pd.to_timedelta("00:00:01") scalar2 = pd.to_timedelta("00:00:02") @@ -930,8 +944,8 @@ def test_operators_timedelta64(self): def test_timedelta64_ops_nat(self): # GH 11349 timedelta_series = Series([NaT, Timedelta("1s")]) - nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[ns]") - single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[ns]") + nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[us]") + single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[us]") # subtraction tm.assert_series_equal(timedelta_series - NaT, nat_series_dtype_timedelta) @@ -1030,7 +1044,7 @@ def test_td64arr_add_sub_datetimelike_scalar( ts = dt_scalar tdi = timedelta_range("1 day", periods=3) - expected = pd.date_range("2012-01-02", periods=3, tz=tz, unit="ns") + expected = pd.date_range("2012-01-02", periods=3, tz=tz) if tz is not None and not timezones.is_utc(expected.tz): # Day is no longer preserved by timedelta add/sub in pandas3 because # it represents Calendar-Day instead of 24h @@ -1042,7 +1056,7 @@ def test_td64arr_add_sub_datetimelike_scalar( tm.assert_equal(ts + tdarr, expected) tm.assert_equal(tdarr + ts, expected) - expected2 = pd.date_range("2011-12-31", periods=3, freq="-1D", tz=tz, unit="ns") + expected2 = pd.date_range("2011-12-31", periods=3, freq="-1D", tz=tz) if tz is not None and not timezones.is_utc(expected2.tz): # Day is no longer preserved by timedelta add/sub in pandas3 because # it represents Calendar-Day instead of 24h @@ -1061,7 +1075,7 @@ def test_td64arr_add_datetime64_nat(self, box_with_array): other = np.datetime64("NaT") tdi = timedelta_range("1 day", periods=3) - expected = DatetimeIndex(["NaT", "NaT", "NaT"], dtype="M8[ns]") + expected = DatetimeIndex(["NaT", "NaT", "NaT"], dtype="M8[us]") tdser = tm.box_expected(tdi, box_with_array) expected = tm.box_expected(expected, box_with_array) @@ -1116,7 +1130,15 @@ def test_td64arr_sub_periodlike( tdi = tm.box_expected(tdi, box_with_array) pi = tm.box_expected(pi, box_with_array2) - msg = "cannot subtract|unsupported operand type" + msg = "|".join( + [ + "cannot subtract", + "unsupported operand type", + r"bad operand type for unary \-: 'PeriodArray'", + r"Input has different freq=-1h from PeriodArray\(.*\)", + "Cannot add/subtract timedelta-like from PeriodArray", + ] + ) with pytest.raises(TypeError, match=msg): tdi - pi @@ -1265,7 +1287,7 @@ def test_td64arr_add_sub_td64_nat(self, box_with_array, tdnat): # GH#18808, GH#23320 special handling for timedelta64("NaT") box = box_with_array tdi = TimedeltaIndex([NaT, Timedelta("1s")]) - expected = TimedeltaIndex(["NaT"] * 2) + expected = TimedeltaIndex(["NaT"] * 2).as_unit("us") obj = tm.box_expected(tdi, box) expected = tm.box_expected(expected, box) @@ -1285,6 +1307,9 @@ def test_td64arr_add_timedeltalike(self, two_hours, box_with_array): box = box_with_array rng 
= timedelta_range("1 days", "10 days") expected = timedelta_range("1 days 02:00:00", "10 days 02:00:00", freq="D") + if isinstance(two_hours, Timedelta) and two_hours.unit == "ns": + expected = expected.as_unit("ns") + rng = tm.box_expected(rng, box) expected = tm.box_expected(expected, box) @@ -1300,6 +1325,8 @@ def test_td64arr_sub_timedeltalike(self, two_hours, box_with_array): box = box_with_array rng = timedelta_range("1 days", "10 days") expected = timedelta_range("0 days 22:00:00", "9 days 22:00:00") + if isinstance(two_hours, Timedelta) and two_hours.unit == "ns": + expected = expected.as_unit("ns") rng = tm.box_expected(rng, box) expected = tm.box_expected(expected, box) @@ -1496,6 +1523,7 @@ def test_td64arr_mul_tdlike_scalar_raises(self, two_hours, box_with_array): "argument must be an integer", "cannot use operands with types dtype", "Cannot multiply with", + r"unsupported operand type\(s\) for \*", ] ) with pytest.raises(TypeError, match=msg): @@ -1826,7 +1854,7 @@ def test_td64_div_object_mixed_result(self, box_with_array): res = tdi / other - expected = Index([1.0, np.timedelta64("NaT", "ns"), orig[0], 1.5], dtype=object) + expected = Index([1.0, np.timedelta64("NaT", "us"), orig[0], 1.5], dtype=object) expected = tm.box_expected(expected, box_with_array, transpose=False) if isinstance(expected, NumpyExtensionArray): expected = expected.to_numpy() @@ -1837,7 +1865,7 @@ def test_td64_div_object_mixed_result(self, box_with_array): res = tdi // other - expected = Index([1, np.timedelta64("NaT", "ns"), orig[0], 1], dtype=object) + expected = Index([1, np.timedelta64("NaT", "us"), orig[0], 1], dtype=object) expected = tm.box_expected(expected, box_with_array, transpose=False) if isinstance(expected, NumpyExtensionArray): expected = expected.to_numpy() @@ -2192,7 +2220,7 @@ def test_td64arr_mul_int_series(self, box_with_array, names): expected = Series( ["0days", "1day", "4days", "9days", "16days"], - dtype="timedelta64[ns]", + dtype="timedelta64[us]", name=exname, ) @@ -2219,7 +2247,7 @@ def test_float_series_rdiv_td64arr(self, box_with_array, names): xname = names[2] if box not in [tm.to_array, pd.array] else names[1] expected = Series( [tdi[n] / ser[n] for n in range(len(ser))], - dtype="timedelta64[ns]", + dtype="timedelta64[us]", name=xname, ) @@ -2284,8 +2312,8 @@ def test_add_timestamp_to_timedelta(): [ timestamp + ( - pd.to_timedelta("0.033333333s") * i - + pd.to_timedelta("0.000000001s") * divmod(i, 3)[0] + pd.to_timedelta("0.033333s") * i + + pd.to_timedelta("0.000001s") * divmod(i, 3)[0] ) for i in range(31) ] diff --git a/pandas/tests/arrays/categorical/test_repr.py b/pandas/tests/arrays/categorical/test_repr.py index e5b94fe25cff3..3e7b8c10221c8 100644 --- a/pandas/tests/arrays/categorical/test_repr.py +++ b/pandas/tests/arrays/categorical/test_repr.py @@ -331,13 +331,13 @@ def test_categorical_repr_timedelta(self): idx = timedelta_range("1 days", periods=5) c = Categorical(idx) exp = """[1 days, 2 days, 3 days, 4 days, 5 days] -Categories (5, timedelta64[ns]): [1 days, 2 days, 3 days, 4 days, 5 days]""" +Categories (5, timedelta64[us]): [1 days, 2 days, 3 days, 4 days, 5 days]""" assert repr(c) == exp c = Categorical(idx.append(idx), categories=idx) exp = """[1 days, 2 days, 3 days, 4 days, 5 days, 1 days, 2 days, 3 days, 4 days, 5 days] -Categories (5, timedelta64[ns]): [1 days, 2 days, 3 days, 4 days, 5 days]""" # noqa: E501 +Categories (5, timedelta64[us]): [1 days, 2 days, 3 days, 4 days, 5 days]""" # noqa: E501 assert repr(c) == exp @@ -345,7 +345,7 @@ def 
test_categorical_repr_timedelta(self): c = Categorical(idx) exp = """[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, ..., 15 days 01:00:00, 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00] Length: 20 -Categories (20, timedelta64[ns]): [0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, +Categories (20, timedelta64[us]): [0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, ..., 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00]""" # noqa: E501 @@ -354,7 +354,7 @@ def test_categorical_repr_timedelta(self): c = Categorical(idx.append(idx), categories=idx) exp = """[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, ..., 15 days 01:00:00, 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00] Length: 40 -Categories (20, timedelta64[ns]): [0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, +Categories (20, timedelta64[us]): [0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, ..., 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00]""" # noqa: E501 @@ -364,13 +364,13 @@ def test_categorical_repr_timedelta_ordered(self): idx = timedelta_range("1 days", periods=5) c = Categorical(idx, ordered=True) exp = """[1 days, 2 days, 3 days, 4 days, 5 days] -Categories (5, timedelta64[ns]): [1 days < 2 days < 3 days < 4 days < 5 days]""" +Categories (5, timedelta64[us]): [1 days < 2 days < 3 days < 4 days < 5 days]""" assert repr(c) == exp c = Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[1 days, 2 days, 3 days, 4 days, 5 days, 1 days, 2 days, 3 days, 4 days, 5 days] -Categories (5, timedelta64[ns]): [1 days < 2 days < 3 days < 4 days < 5 days]""" # noqa: E501 +Categories (5, timedelta64[us]): [1 days < 2 days < 3 days < 4 days < 5 days]""" # noqa: E501 assert repr(c) == exp @@ -378,7 +378,7 @@ def test_categorical_repr_timedelta_ordered(self): c = Categorical(idx, ordered=True) exp = """[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, ..., 15 days 01:00:00, 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00] Length: 20 -Categories (20, timedelta64[ns]): [0 days 01:00:00 < 1 days 01:00:00 < 2 days 01:00:00 < +Categories (20, timedelta64[us]): [0 days 01:00:00 < 1 days 01:00:00 < 2 days 01:00:00 < 3 days 01:00:00 ... 16 days 01:00:00 < 17 days 01:00:00 < 18 days 01:00:00 < 19 days 01:00:00]""" # noqa: E501 @@ -387,7 +387,7 @@ def test_categorical_repr_timedelta_ordered(self): c = Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, ..., 15 days 01:00:00, 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00] Length: 40 -Categories (20, timedelta64[ns]): [0 days 01:00:00 < 1 days 01:00:00 < 2 days 01:00:00 < +Categories (20, timedelta64[us]): [0 days 01:00:00 < 1 days 01:00:00 < 2 days 01:00:00 < 3 days 01:00:00 ... 
16 days 01:00:00 < 17 days 01:00:00 < 18 days 01:00:00 < 19 days 01:00:00]""" # noqa: E501 diff --git a/pandas/tests/arrays/test_array.py b/pandas/tests/arrays/test_array.py index c327d1b647bce..de80171e0bb60 100644 --- a/pandas/tests/arrays/test_array.py +++ b/pandas/tests/arrays/test_array.py @@ -163,7 +163,7 @@ def test_dt64_array(dtype_unit): ( pd.TimedeltaIndex(["1h", "2h"]), None, - TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"), + TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[us]"), ), ( # preserve non-nano, i.e. don't cast to NumpyExtensionArray @@ -369,7 +369,7 @@ def test_array_copy(): # timedelta ( [pd.Timedelta("1h"), pd.Timedelta("2h")], - TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"), + TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[us]"), ), ( np.array([1, 2], dtype="m8[ns]"), diff --git a/pandas/tests/arrays/test_datetimelike.py b/pandas/tests/arrays/test_datetimelike.py index 3e608b155c890..60a80f9af78c5 100644 --- a/pandas/tests/arrays/test_datetimelike.py +++ b/pandas/tests/arrays/test_datetimelike.py @@ -979,15 +979,15 @@ def test_array_interface(self, timedelta_index): assert result is expected tm.assert_numpy_array_equal(result, expected) - # specifying m8[ns] gives the same result as default - result = np.asarray(arr, dtype="timedelta64[ns]") + # specifying m8[us] gives the same result as default + result = np.asarray(arr, dtype="timedelta64[us]") expected = arr._ndarray assert result is expected tm.assert_numpy_array_equal(result, expected) - result = np.array(arr, dtype="timedelta64[ns]", copy=copy_false) + result = np.array(arr, dtype="timedelta64[us]", copy=copy_false) assert result is expected tm.assert_numpy_array_equal(result, expected) - result = np.array(arr, dtype="timedelta64[ns]") + result = np.array(arr, dtype="timedelta64[us]") if not np_version_gt2: # TODO: GH 57739 assert result is not expected diff --git a/pandas/tests/arrays/timedeltas/test_reductions.py b/pandas/tests/arrays/timedeltas/test_reductions.py index 3565f2e8690e2..e1cafbcfcb6ed 100644 --- a/pandas/tests/arrays/timedeltas/test_reductions.py +++ b/pandas/tests/arrays/timedeltas/test_reductions.py @@ -127,7 +127,7 @@ def test_sum_2d_skipna_false(self): @pytest.mark.parametrize( "add", [ - Timedelta(0), + Timedelta(0).as_unit("us"), pd.Timestamp("2021-01-01"), pd.Timestamp("2021-01-01", tz="UTC"), pd.Timestamp("2021-01-01", tz="Asia/Tokyo"), @@ -138,7 +138,7 @@ def test_std(self, add): arr = tdi.array result = arr.std(skipna=True) - expected = Timedelta(hours=2) + expected = Timedelta(hours=2).as_unit("us") assert isinstance(result, Timedelta) assert result == expected @@ -208,7 +208,7 @@ def test_mean_2d(self): tm.assert_timedelta_array_equal(result, expected) result = tda.mean(axis=1) - expected = tda[:, 0] + Timedelta(hours=12) + expected = tda[:, 0] + Timedelta(hours=12).as_unit("us") tm.assert_timedelta_array_equal(result, expected) result = tda.mean(axis=None) diff --git a/pandas/tests/frame/indexing/test_indexing.py b/pandas/tests/frame/indexing/test_indexing.py index 2082fa8624739..d11c2ef8a3a3c 100644 --- a/pandas/tests/frame/indexing/test_indexing.py +++ b/pandas/tests/frame/indexing/test_indexing.py @@ -1591,7 +1591,7 @@ def test_object_casting_indexing_wraps_datetimelike(): assert isinstance(val, Timestamp) blk = mgr.blocks[mgr.blknos[2]] - assert blk.dtype == "m8[ns]" # we got the right block + assert blk.dtype == "m8[us]" # we got the right block val = blk.iget((0, 0)) assert isinstance(val, pd.Timedelta) diff --git 
a/pandas/tests/frame/indexing/test_mask.py b/pandas/tests/frame/indexing/test_mask.py index f52bdd0a430ac..ac648696ead58 100644 --- a/pandas/tests/frame/indexing/test_mask.py +++ b/pandas/tests/frame/indexing/test_mask.py @@ -131,7 +131,8 @@ def test_mask_where_dtype_timedelta(): tm.assert_frame_equal(df.mask(df.notna()), expected) expected = DataFrame( - [np.nan, np.nan, np.nan, Timedelta("3 day"), Timedelta("4 day")] + [np.nan, np.nan, np.nan, Timedelta("3 day"), Timedelta("4 day")], + dtype="m8[ns]", ) tm.assert_frame_equal(df.where(df > Timedelta(2, unit="D")), expected) diff --git a/pandas/tests/frame/indexing/test_setitem.py b/pandas/tests/frame/indexing/test_setitem.py index f03cdfcb8f70e..517b026757d89 100644 --- a/pandas/tests/frame/indexing/test_setitem.py +++ b/pandas/tests/frame/indexing/test_setitem.py @@ -1000,6 +1000,8 @@ def test_loc_expansion_with_timedelta_type(self): index=Index([0]), columns=(["a", "b", "c"]), ) + expected["a"] = expected["a"].astype("m8[ns]") + expected["b"] = expected["b"].astype("m8[ns]") tm.assert_frame_equal(result, expected) def test_setitem_tuple_key_in_empty_frame(self): diff --git a/pandas/tests/frame/methods/test_astype.py b/pandas/tests/frame/methods/test_astype.py index a364f85191c1d..b95ad74c84120 100644 --- a/pandas/tests/frame/methods/test_astype.py +++ b/pandas/tests/frame/methods/test_astype.py @@ -422,7 +422,7 @@ def test_astype_from_object_to_timedelta_unit(self, unit): ] df = DataFrame(vals, dtype=object) msg = ( - r"Cannot convert from timedelta64\[ns\] to timedelta64\[.*\]. " + r"Cannot convert from timedelta64\[us\] to timedelta64\[.*\]. " "Supported resolutions are 's', 'ms', 'us', 'ns'" ) with pytest.raises(ValueError, match=msg): diff --git a/pandas/tests/frame/methods/test_combine_first.py b/pandas/tests/frame/methods/test_combine_first.py index 07483a8cd91fe..d4006eb4f942f 100644 --- a/pandas/tests/frame/methods/test_combine_first.py +++ b/pandas/tests/frame/methods/test_combine_first.py @@ -327,7 +327,7 @@ def test_combine_first_timedelta(self): ) exp = DataFrame({"TD": exp_dts}, index=[1, 2, 3, 4, 5, 7]) tm.assert_frame_equal(res, exp) - assert res["TD"].dtype == "timedelta64[ns]" + assert res["TD"].dtype == "timedelta64[us]" def test_combine_first_period(self): data1 = pd.PeriodIndex(["2011-01", "NaT", "2011-03", "2011-04"], freq="M") diff --git a/pandas/tests/frame/methods/test_convert_dtypes.py b/pandas/tests/frame/methods/test_convert_dtypes.py index 013a2112c2935..e302572f4912f 100644 --- a/pandas/tests/frame/methods/test_convert_dtypes.py +++ b/pandas/tests/frame/methods/test_convert_dtypes.py @@ -110,7 +110,7 @@ def test_pyarrow_dtype_backend(self, using_nan_is_na): datetime.timedelta(2), datetime.timedelta(3), ], - type=pa.duration("ns"), + type=pa.duration("us"), ) ), } diff --git a/pandas/tests/frame/methods/test_describe.py b/pandas/tests/frame/methods/test_describe.py index 90e27cbcf412b..1f61c7d0f32f9 100644 --- a/pandas/tests/frame/methods/test_describe.py +++ b/pandas/tests/frame/methods/test_describe.py @@ -237,15 +237,15 @@ def test_describe_timedelta_values(self): tm.assert_frame_equal(result, expected) exp_repr = ( - " t1 t2\n" - "count 5 5\n" - "mean 3 days 00:00:00 0 days 03:00:00\n" - "std 1 days 13:56:50.394919273 0 days 01:34:52.099788303\n" - "min 1 days 00:00:00 0 days 01:00:00\n" - "25% 2 days 00:00:00 0 days 02:00:00\n" - "50% 3 days 00:00:00 0 days 03:00:00\n" - "75% 4 days 00:00:00 0 days 04:00:00\n" - "max 5 days 00:00:00 0 days 05:00:00" + " t1 t2\n" + "count 5 5\n" + "mean 3 days 00:00:00 0 
days 03:00:00\n" + "std 1 days 13:56:50.394919 0 days 01:34:52.099788\n" + "min 1 days 00:00:00 0 days 01:00:00\n" + "25% 2 days 00:00:00 0 days 02:00:00\n" + "50% 3 days 00:00:00 0 days 03:00:00\n" + "75% 4 days 00:00:00 0 days 04:00:00\n" + "max 5 days 00:00:00 0 days 05:00:00" ) assert repr(result) == exp_repr diff --git a/pandas/tests/frame/methods/test_diff.py b/pandas/tests/frame/methods/test_diff.py index 61d718452b350..de3304c601916 100644 --- a/pandas/tests/frame/methods/test_diff.py +++ b/pandas/tests/frame/methods/test_diff.py @@ -109,8 +109,8 @@ def test_diff_datetime_axis0(self, tz): # GH#18578 df = DataFrame( { - 0: date_range("2010", freq="D", periods=2, tz=tz, unit="ns"), - 1: date_range("2010", freq="D", periods=2, tz=tz, unit="ns"), + 0: date_range("2010", freq="D", periods=2, tz=tz), + 1: date_range("2010", freq="D", periods=2, tz=tz), } ) @@ -128,15 +128,15 @@ def test_diff_datetime_axis1(self, tz): # GH#18578 df = DataFrame( { - 0: date_range("2010", freq="D", periods=2, tz=tz, unit="ns"), - 1: date_range("2010", freq="D", periods=2, tz=tz, unit="ns"), + 0: date_range("2010", freq="D", periods=2, tz=tz), + 1: date_range("2010", freq="D", periods=2, tz=tz), } ) result = df.diff(axis=1) expected = DataFrame( { - 0: pd.TimedeltaIndex(["NaT", "NaT"]), + 0: pd.TimedeltaIndex(["NaT", "NaT"], dtype="m8[us]"), 1: pd.TimedeltaIndex(["0 days", "0 days"]), } ) diff --git a/pandas/tests/frame/methods/test_to_csv.py b/pandas/tests/frame/methods/test_to_csv.py index ababcaa11377c..f5f3a3031b1cb 100644 --- a/pandas/tests/frame/methods/test_to_csv.py +++ b/pandas/tests/frame/methods/test_to_csv.py @@ -128,7 +128,7 @@ def test_to_csv_from_csv3(self, temp_file): def test_to_csv_from_csv4(self, temp_file): path = str(temp_file) # GH 10833 (TimedeltaIndex formatting) - dt = pd.Timedelta(seconds=1) + dt = pd.Timedelta(seconds=1).as_unit("us") df = DataFrame( {"dt_data": [i * dt for i in range(3)]}, index=Index([i * dt for i in range(3)], name="dt_index"), @@ -1163,7 +1163,7 @@ def test_to_csv_date_format(self, temp_file, datetime_frame): tm.assert_frame_equal(test, nat_frame) - @pytest.mark.parametrize("td", [pd.Timedelta(0), pd.Timedelta("10s")]) + @pytest.mark.parametrize("td", [pd.Timedelta(0).as_unit("us"), pd.Timedelta("10s")]) def test_to_csv_with_dst_transitions(self, td, temp_file): path = str(temp_file) # make sure we are not failing on transitions @@ -1182,11 +1182,7 @@ def test_to_csv_with_dst_transitions(self, td, temp_file): # we have to reconvert the index as we # don't parse the tz's result = read_csv(path, index_col=0) - result.index = ( - to_datetime(result.index, utc=True) - .tz_convert("Europe/London") - .as_unit("ns") - ) + result.index = to_datetime(result.index, utc=True).tz_convert("Europe/London") tm.assert_frame_equal(result, df) @pytest.mark.parametrize( diff --git a/pandas/tests/frame/test_reductions.py b/pandas/tests/frame/test_reductions.py index 8cb2e968c3b58..4d8f163197416 100644 --- a/pandas/tests/frame/test_reductions.py +++ b/pandas/tests/frame/test_reductions.py @@ -613,7 +613,7 @@ def test_sem(self, datetime_frame): "D": Series([np.nan], dtype="str"), "E": Categorical([np.nan], categories=["a"]), "F": DatetimeIndex([pd.NaT], dtype="M8[ns]"), - "G": to_timedelta([pd.NaT]).as_unit("ns"), + "G": to_timedelta([pd.NaT]).as_unit("us"), }, ), ( @@ -781,12 +781,14 @@ def test_std_timedelta64_skipna_false(self): result = df.std(skipna=False) expected = Series( - [df["A"].std(), pd.NaT], index=["A", "B"], dtype="timedelta64[ns]" + [df["A"].std(), pd.NaT], 
index=["A", "B"], dtype="timedelta64[us]" ) tm.assert_series_equal(result, expected) result = df.std(axis=1, skipna=False) - expected = Series([pd.Timedelta(0)] * 8 + [pd.NaT, pd.Timedelta(0)]) + expected = Series( + [pd.Timedelta(0)] * 8 + [pd.NaT, pd.Timedelta(0)], dtype="m8[us]" + ) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( diff --git a/pandas/tests/generic/test_to_xarray.py b/pandas/tests/generic/test_to_xarray.py index ccbae4a37ab9d..3aabdb6d7869a 100644 --- a/pandas/tests/generic/test_to_xarray.py +++ b/pandas/tests/generic/test_to_xarray.py @@ -15,7 +15,7 @@ if xarray is not None and Version(xarray.__version__) < Version("2025.1.0"): pytestmark = pytest.mark.filterwarnings( - "ignore:Converting non-nanosecond precision datetime:UserWarning" + "ignore:Converting non-nanosecond precision:UserWarning" ) diff --git a/pandas/tests/groupby/aggregate/test_aggregate.py b/pandas/tests/groupby/aggregate/test_aggregate.py index 2dc4911459989..b1c76d0839950 100644 --- a/pandas/tests/groupby/aggregate/test_aggregate.py +++ b/pandas/tests/groupby/aggregate/test_aggregate.py @@ -1736,7 +1736,7 @@ def test_groupby_agg_extension_timedelta_cumsum_with_named_aggregation(): { "td": Series( ["0 days 01:00:00", "0 days 00:15:00", "0 days 01:15:00"], - dtype="timedelta64[ns]", + dtype="timedelta64[us]", ), "grps": ["a", "a", "b"], } diff --git a/pandas/tests/groupby/methods/test_quantile.py b/pandas/tests/groupby/methods/test_quantile.py index 25bac63034125..1b4a26919af44 100644 --- a/pandas/tests/groupby/methods/test_quantile.py +++ b/pandas/tests/groupby/methods/test_quantile.py @@ -370,8 +370,8 @@ def test_groupby_timedelta_quantile(): expected = DataFrame( { "value": [ - pd.Timedelta("0 days 00:00:00.990000"), - pd.Timedelta("0 days 00:00:02.990000"), + pd.Timedelta("0 days 00:00:00.990000").as_unit("ns"), + pd.Timedelta("0 days 00:00:02.990000").as_unit("ns"), ] }, index=Index([1, 2], name="group"), diff --git a/pandas/tests/groupby/test_groupby.py b/pandas/tests/groupby/test_groupby.py index 9d238a745fd30..4ad6e8dd34fed 100644 --- a/pandas/tests/groupby/test_groupby.py +++ b/pandas/tests/groupby/test_groupby.py @@ -147,7 +147,7 @@ def test_len_nan_group(): def test_groupby_timedelta_median(): # issue 57926 - expected = Series(data=Timedelta("1D"), index=["foo"]) + expected = Series(data=Timedelta("1D"), index=["foo"], dtype="m8[us]") df = DataFrame({"label": ["foo", "foo"], "timedelta": [pd.NaT, Timedelta("1D")]}) gb = df.groupby("label")["timedelta"] actual = gb.median() diff --git a/pandas/tests/groupby/test_reductions.py b/pandas/tests/groupby/test_reductions.py index 9c13ceec49e35..890ce4e398f01 100644 --- a/pandas/tests/groupby/test_reductions.py +++ b/pandas/tests/groupby/test_reductions.py @@ -1312,7 +1312,7 @@ def test_groupby_sum_timedelta_with_nat(): "b": [pd.Timedelta("1D"), pd.Timedelta("2D"), pd.Timedelta("3D"), pd.NaT], } ) - td3 = pd.Timedelta(days=3) + td3 = pd.Timedelta(days=3).as_unit("us") gb = df.groupby("a") @@ -1324,7 +1324,7 @@ def test_groupby_sum_timedelta_with_nat(): tm.assert_series_equal(res, expected["b"]) res = gb["b"].sum(min_count=2) - expected = Series([td3, pd.NaT], dtype="m8[ns]", name="b", index=expected.index) + expected = Series([td3, pd.NaT], dtype="m8[us]", name="b", index=expected.index) tm.assert_series_equal(res, expected) @@ -1497,7 +1497,7 @@ def test_groupby_prod_with_int64_dtype(): def test_groupby_std_datetimelike(): # GH#48481 - tdi = pd.timedelta_range("1 Day", periods=10000) + tdi = pd.timedelta_range("1 Day", periods=10000, 
unit="ns") ser = Series(tdi) ser[::5] *= 2 # get different std for different groups diff --git a/pandas/tests/indexes/datetimes/test_arithmetic.py b/pandas/tests/indexes/datetimes/test_arithmetic.py index f5ada5a191250..25164349babe9 100644 --- a/pandas/tests/indexes/datetimes/test_arithmetic.py +++ b/pandas/tests/indexes/datetimes/test_arithmetic.py @@ -36,7 +36,6 @@ def test_sub_datetime_preserves_freq(self, tz_naive_fixture): periods=12, tz=tz_naive_fixture, freq=offsets.Hour(24), - unit="ns", ) res = dti - dti[0] diff --git a/pandas/tests/indexes/datetimes/test_date_range.py b/pandas/tests/indexes/datetimes/test_date_range.py index a30f729aba364..fd6a20d9274ce 100644 --- a/pandas/tests/indexes/datetimes/test_date_range.py +++ b/pandas/tests/indexes/datetimes/test_date_range.py @@ -164,6 +164,7 @@ def test_date_range_edges(self, freq): # GH#13672 td = Timedelta(f"1{freq}") ts = Timestamp("1970-01-01") + exp_dtype = "M8[us]" if freq != "ns" else "M8[ns]" idx = date_range( start=ts + td, @@ -172,7 +173,7 @@ def test_date_range_edges(self, freq): ) exp = DatetimeIndex( [ts + n * td for n in range(1, 5)], - dtype="M8[ns]", + dtype=exp_dtype, freq=freq, ) tm.assert_index_equal(idx, exp) @@ -183,7 +184,7 @@ def test_date_range_edges(self, freq): end=ts + td, freq=freq, ) - exp = DatetimeIndex([], dtype="M8[ns]", freq=freq) + exp = DatetimeIndex([], dtype=exp_dtype, freq=freq) tm.assert_index_equal(idx, exp) # start matches end @@ -192,7 +193,7 @@ def test_date_range_edges(self, freq): end=ts + td, freq=freq, ) - exp = DatetimeIndex([ts + td], dtype="M8[ns]", freq=freq) + exp = DatetimeIndex([ts + td], dtype=exp_dtype, freq=freq) tm.assert_index_equal(idx, exp) def test_date_range_near_implementation_bound(self): diff --git a/pandas/tests/indexes/interval/test_astype.py b/pandas/tests/indexes/interval/test_astype.py index 84de11e9dad2f..3e6c67d8610d8 100644 --- a/pandas/tests/indexes/interval/test_astype.py +++ b/pandas/tests/indexes/interval/test_astype.py @@ -222,7 +222,7 @@ def test_subtype_integer(self, index, subtype): if subtype != "int64": msg = ( r"Cannot convert interval" - r"\[(timedelta64\[ns\]|datetime64\[us(, US/Eastern)?\]), .*\] " + r"\[(timedelta64\[us\]|datetime64\[us(, US/Eastern)?\]), .*\] " r"to interval\[uint64, .*\]" ) with pytest.raises(TypeError, match=msg): diff --git a/pandas/tests/indexes/interval/test_constructors.py b/pandas/tests/indexes/interval/test_constructors.py index 7a7ac3ae59014..b1ea1460c67d4 100644 --- a/pandas/tests/indexes/interval/test_constructors.py +++ b/pandas/tests/indexes/interval/test_constructors.py @@ -45,7 +45,7 @@ class ConstructorTests: date_range("20180101", periods=10, tz="US/Eastern", unit="ns"), "datetime64[ns, US/Eastern]", ), - (timedelta_range("1 day", periods=10), "m8[ns]"), + (timedelta_range("1 day", periods=10), "m8[us]"), ], ) @pytest.mark.parametrize("name", [None, "foo"]) diff --git a/pandas/tests/indexes/test_base.py b/pandas/tests/indexes/test_base.py index 2a80c692d17bf..68c97b140fda0 100644 --- a/pandas/tests/indexes/test_base.py +++ b/pandas/tests/indexes/test_base.py @@ -274,7 +274,7 @@ def test_constructor_dtypes_datetime(self, tz_naive_fixture, attr, klass): @pytest.mark.parametrize("attr", ["values", "asi8"]) @pytest.mark.parametrize("klass", [Index, TimedeltaIndex]) def test_constructor_dtypes_timedelta(self, attr, klass): - index = timedelta_range("1 days", periods=5) + index = timedelta_range("1 days", periods=5, unit="ns") index = index._with_freq(None) # won't be preserved by constructors dtype = index.dtype 
diff --git a/pandas/tests/indexes/timedeltas/methods/test_shift.py b/pandas/tests/indexes/timedeltas/methods/test_shift.py index 9bbf06dc51a0c..297887596dc82 100644 --- a/pandas/tests/indexes/timedeltas/methods/test_shift.py +++ b/pandas/tests/indexes/timedeltas/methods/test_shift.py @@ -49,6 +49,7 @@ def test_tdi_shift_int(self): "5 days 01:00:00", ], freq="D", + dtype="m8[ns]", ) tm.assert_index_equal(result, expected) @@ -66,6 +67,7 @@ def test_tdi_shift_nonstandard_freq(self): "10 days 01:00:03", ], freq="D", + dtype="m8[ns]", ) tm.assert_index_equal(result, expected) diff --git a/pandas/tests/indexes/timedeltas/test_constructors.py b/pandas/tests/indexes/timedeltas/test_constructors.py index 63d2161dcec09..1dc64ae5c720b 100644 --- a/pandas/tests/indexes/timedeltas/test_constructors.py +++ b/pandas/tests/indexes/timedeltas/test_constructors.py @@ -170,11 +170,11 @@ def test_constructor_coverage(self): # NumPy string array strings = np.array(["1 days", "2 days", "3 days"]) result = TimedeltaIndex(strings) - expected = to_timedelta([1, 2, 3], unit="D") + expected = to_timedelta([1, 2, 3], unit="D").as_unit("us") tm.assert_index_equal(result, expected) - from_ints = TimedeltaIndex(expected.asi8) - tm.assert_index_equal(from_ints, expected) + from_ints = TimedeltaIndex(expected.as_unit("ns").asi8) + tm.assert_index_equal(from_ints, expected.as_unit("ns")) # non-conforming freq msg = ( @@ -265,4 +265,4 @@ def test_unit_deprecated(self, unit, unit_depr): with tm.assert_produces_warning(Pandas4Warning, match=msg): tdi = to_timedelta([1, 2], unit=unit_depr) - tm.assert_index_equal(tdi, expected) + tm.assert_index_equal(tdi, expected.as_unit("ns")) diff --git a/pandas/tests/indexes/timedeltas/test_formats.py b/pandas/tests/indexes/timedeltas/test_formats.py index b1daa0c517570..0ec4c999aa838 100644 --- a/pandas/tests/indexes/timedeltas/test_formats.py +++ b/pandas/tests/indexes/timedeltas/test_formats.py @@ -30,18 +30,18 @@ def test_representation(self, method): exp1 = "TimedeltaIndex([], dtype='timedelta64[ns]', freq='D')" - exp2 = "TimedeltaIndex(['1 days'], dtype='timedelta64[ns]', freq='D')" + exp2 = "TimedeltaIndex(['1 days'], dtype='timedelta64[us]', freq='D')" - exp3 = "TimedeltaIndex(['1 days', '2 days'], dtype='timedelta64[ns]', freq='D')" + exp3 = "TimedeltaIndex(['1 days', '2 days'], dtype='timedelta64[us]', freq='D')" exp4 = ( "TimedeltaIndex(['1 days', '2 days', '3 days'], " - "dtype='timedelta64[ns]', freq='D')" + "dtype='timedelta64[us]', freq='D')" ) exp5 = ( "TimedeltaIndex(['1 days 00:00:01', '2 days 00:00:00', " - "'3 days 00:00:00'], dtype='timedelta64[ns]', freq=None)" + "'3 days 00:00:00'], dtype='timedelta64[us]', freq=None)" ) with pd.option_context("display.width", 300): @@ -61,17 +61,17 @@ def test_representation_to_series(self): exp1 = """Series([], dtype: timedelta64[ns])""" - exp2 = "0 1 days\ndtype: timedelta64[ns]" + exp2 = "0 1 days\ndtype: timedelta64[us]" - exp3 = "0 1 days\n1 2 days\ndtype: timedelta64[ns]" + exp3 = "0 1 days\n1 2 days\ndtype: timedelta64[us]" - exp4 = "0 1 days\n1 2 days\n2 3 days\ndtype: timedelta64[ns]" + exp4 = "0 1 days\n1 2 days\n2 3 days\ndtype: timedelta64[us]" exp5 = ( "0 1 days 00:00:01\n" "1 2 days 00:00:00\n" "2 3 days 00:00:00\n" - "dtype: timedelta64[ns]" + "dtype: timedelta64[us]" ) with pd.option_context("display.width", 300): diff --git a/pandas/tests/indexes/timedeltas/test_scalar_compat.py b/pandas/tests/indexes/timedeltas/test_scalar_compat.py index a79b7bb524368..80c02f9b1bd38 100644 --- 
+++ b/pandas/tests/indexes/timedeltas/test_scalar_compat.py
@@ -101,7 +101,7 @@ def test_round(self):
         t1 = timedelta_range("1 days", periods=3, freq="1 min 2 s 3 us")
         t2 = -1 * t1
         t1a = timedelta_range("1 days", periods=3, freq="1 min 2 s")
-        t1c = TimedeltaIndex(np.array([1, 1, 1], "m8[D]")).as_unit("ns")
+        t1c = TimedeltaIndex(np.array([1, 1, 1], "m8[D]")).as_unit("us")
 
         # note that negative times round DOWN! so don't give whole numbers
         msg = "'d' is deprecated and will be removed in a future version."
diff --git a/pandas/tests/indexes/timedeltas/test_setops.py b/pandas/tests/indexes/timedeltas/test_setops.py
index 951b8346ac9e6..fbbe0700c2380 100644
--- a/pandas/tests/indexes/timedeltas/test_setops.py
+++ b/pandas/tests/indexes/timedeltas/test_setops.py
@@ -160,7 +160,7 @@ def test_zero_length_input_index(self, sort):
         # if no overlap exists return empty index
         (
             timedelta_range("1 day", periods=10, freq="h", name="idx")[5:],
-            TimedeltaIndex([], freq="h", name="idx", dtype="m8[ns]"),
+            TimedeltaIndex([], freq="h", name="idx", dtype="m8[us]"),
         ),
     ],
 )
diff --git a/pandas/tests/indexes/timedeltas/test_timedelta_range.py b/pandas/tests/indexes/timedeltas/test_timedelta_range.py
index 55807e606229a..1cca77e7e44c9 100644
--- a/pandas/tests/indexes/timedeltas/test_timedelta_range.py
+++ b/pandas/tests/indexes/timedeltas/test_timedelta_range.py
@@ -24,23 +24,25 @@ def test_timedelta_range_unit(self):
         tm.assert_numpy_array_equal(tdi.to_numpy(), exp_arr)
 
     def test_timedelta_range(self):
-        expected = to_timedelta(np.arange(5), unit="D")
+        expected = to_timedelta(np.arange(5), unit="D").as_unit("us")
         result = timedelta_range("0 days", periods=5, freq="D")
         tm.assert_index_equal(result, expected)
 
-        expected = to_timedelta(np.arange(11), unit="D")
+        expected = to_timedelta(np.arange(11), unit="D").as_unit("us")
         result = timedelta_range("0 days", "10 days", freq="D")
         tm.assert_index_equal(result, expected)
 
-        expected = to_timedelta(np.arange(5), unit="D") + Second(2) + Day()
+        expected = (
+            to_timedelta(np.arange(5), unit="D").as_unit("us") + Second(2) + Day()
+        )
         result = timedelta_range("1 days, 00:00:02", "5 days, 00:00:02", freq="D")
         tm.assert_index_equal(result, expected)
 
-        expected = to_timedelta([1, 3, 5, 7, 9], unit="D") + Second(2)
+        expected = to_timedelta([1, 3, 5, 7, 9], unit="D").as_unit("us") + Second(2)
         result = timedelta_range("1 days, 00:00:02", periods=5, freq="2D")
         tm.assert_index_equal(result, expected)
 
-        expected = to_timedelta(np.arange(50), unit="min") * 30
+        expected = to_timedelta(np.arange(50), unit="min").as_unit("us") * 30
         result = timedelta_range("0 days", freq="30min", periods=50)
         tm.assert_index_equal(result, expected)
 
diff --git a/pandas/tests/indexing/test_coercion.py b/pandas/tests/indexing/test_coercion.py
index e0174a6613615..4a76ca84a8258 100644
--- a/pandas/tests/indexing/test_coercion.py
+++ b/pandas/tests/indexing/test_coercion.py
@@ -270,12 +270,12 @@ def test_insert_index_datetimes(self, fill_val, exp_dtype, insert_value):
 
     def test_insert_index_timedelta64(self):
         obj = pd.TimedeltaIndex(["1 day", "2 day", "3 day", "4 day"])
-        assert obj.dtype == "timedelta64[ns]"
+        assert obj.dtype == "timedelta64[us]"
 
         # timedelta64 + timedelta64 => timedelta64
         exp = pd.TimedeltaIndex(["1 day", "10 day", "2 day", "3 day", "4 day"])
         self._assert_insert_conversion(
-            obj, pd.Timedelta("10 day"), exp, "timedelta64[ns]"
+            obj, pd.Timedelta("10 day"), exp, "timedelta64[us]"
        )
 
         for item in [pd.Timestamp("2012-01-01").as_unit("s"), 1]:
@@ -470,7 +470,8 @@ def test_where_series_period(self):
         raise NotImplementedError
 
     @pytest.mark.parametrize(
-        "value", [pd.Timedelta(days=9), timedelta(days=9), np.timedelta64(9, "D")]
+        "value",
+        [pd.Timedelta(days=9).as_unit("us"), timedelta(days=9), np.timedelta64(9, "D")],
     )
     def test_where_index_timedelta64(self, value):
         tdi = pd.timedelta_range("1 Day", periods=4)
@@ -748,7 +749,7 @@ class TestReplaceSeriesCoercion(CoercionBase):
             pd.Timestamp("2011-01-03", tz=tz).as_unit("s"),
         ]
 
-    rep["timedelta64[ns]"] = [pd.Timedelta("1 day"), pd.Timedelta("2 day")]
+    rep["timedelta64[us]"] = [pd.Timedelta("1 day"), pd.Timedelta("2 day")]
 
     @pytest.fixture(params=["dict", "series"])
     def how(self, request):
@@ -764,7 +765,7 @@ def how(self, request):
             "datetime64[ns]",
             "datetime64[ns, UTC]",
             "datetime64[ns, US/Eastern]",
-            "timedelta64[ns]",
+            "timedelta64[us]",
         ]
     )
     def from_key(self, request):
@@ -780,7 +781,7 @@ def from_key(self, request):
             "datetime64[ns]",
             "datetime64[ns, UTC]",
             "datetime64[ns, US/Eastern]",
-            "timedelta64[ns]",
+            "timedelta64[us]",
         ],
         ids=[
             "object",
@@ -840,7 +841,7 @@ def test_replace_series(self, to_key, from_key, replacer):
 
     @pytest.mark.parametrize(
         "to_key",
-        ["timedelta64[ns]", "bool", "object", "complex128", "float64", "int64"],
+        ["timedelta64[us]", "bool", "object", "complex128", "float64", "int64"],
         indirect=True,
     )
     @pytest.mark.parametrize(
diff --git a/pandas/tests/io/json/test_pandas.py b/pandas/tests/io/json/test_pandas.py
index a89d0a9de1bbc..5a3ec254c96b0 100644
--- a/pandas/tests/io/json/test_pandas.py
+++ b/pandas/tests/io/json/test_pandas.py
@@ -1793,8 +1793,11 @@ def test_read_json_with_very_long_file_path(self, compression):
     @pytest.mark.parametrize(
         "date_format,key", [("epoch", 86400000), ("iso", "P1DT0H0M0S")]
     )
-    def test_timedelta_as_label(self, date_format, key):
-        df = DataFrame([[1]], columns=[pd.Timedelta("1D")])
+    def test_timedelta_as_label(self, date_format, key, unit, request):
+        if unit != "ns":
+            mark = pytest.mark.xfail(reason="GH#63236 failure to round-trip")
+            request.applymarker(mark)
+        df = DataFrame([[1]], columns=[pd.Timedelta("1D").as_unit(unit)])
         expected = f'{{"{key}":{{"0":1}}}}'
         expected_warning = None
 
diff --git a/pandas/tests/io/pytables/test_store.py b/pandas/tests/io/pytables/test_store.py
index a80c7ea59c2b7..8f814ed3b05ff 100644
--- a/pandas/tests/io/pytables/test_store.py
+++ b/pandas/tests/io/pytables/test_store.py
@@ -1017,6 +1017,7 @@ def test_duplicate_column_name(tmp_path, setup_path):
         assert other.equals(df)
 
 
+@pytest.mark.xfail(reason="non-nano TimedeltaIndex does not round-trip")
 def test_preserve_timedeltaindex_type(setup_path):
     # GH9635
     df = DataFrame(np.random.default_rng(2).normal(size=(10, 5)))
diff --git a/pandas/tests/plotting/test_converter.py b/pandas/tests/plotting/test_converter.py
index cfdfa7f723599..e33e91ccf6c6e 100644
--- a/pandas/tests/plotting/test_converter.py
+++ b/pandas/tests/plotting/test_converter.py
@@ -347,7 +347,7 @@ class TestTimeDeltaConverter:
     )
     def test_format_timedelta_ticks(self, x, decimal, format_expected):
         tdc = converter.TimeSeries_TimedeltaFormatter
-        result = tdc.format_timedelta_ticks(x, pos=None, n_decimals=decimal)
+        result = tdc.format_timedelta_ticks(x, pos=None, n_decimals=decimal, exp=9)
         assert result == format_expected
 
     @pytest.mark.parametrize("view_interval", [(1, 2), (2, 1)])
diff --git a/pandas/tests/plotting/test_datetimelike.py b/pandas/tests/plotting/test_datetimelike.py
index ca56185deaebe..5c2a31b8bc548 100644
--- a/pandas/tests/plotting/test_datetimelike.py
+++ b/pandas/tests/plotting/test_datetimelike.py
@@ -1531,7 +1531,7 @@ def test_format_timedelta_ticks_narrow(self):
         assert len(result_labels) == len(expected_labels)
         assert result_labels == expected_labels
 
-    def test_format_timedelta_ticks_wide(self):
+    def test_format_timedelta_ticks_wide(self, unit):
         expected_labels = [
             "00:00:00",
             "1 days 03:46:40",
@@ -1544,7 +1544,7 @@ def test_format_timedelta_ticks_wide(self):
             "9 days 06:13:20",
         ]
 
-        rng = timedelta_range("0", periods=10, freq="1 D")
+        rng = timedelta_range("0", periods=10, freq="1 D", unit=unit)
         df = DataFrame(np.random.default_rng(2).standard_normal((len(rng), 3)), rng)
         _, ax = mpl.pyplot.subplots()
         ax = df.plot(fontsize=2, ax=ax)
diff --git a/pandas/tests/resample/test_timedelta.py b/pandas/tests/resample/test_timedelta.py
index 10aebb6da2d70..e0b4248fab210 100644
--- a/pandas/tests/resample/test_timedelta.py
+++ b/pandas/tests/resample/test_timedelta.py
@@ -53,7 +53,8 @@ def test_resample_with_timedeltas():
     expected.index = timedelta_range("0 days", freq="30min", periods=50)
 
     df = DataFrame(
-        {"A": np.arange(1480)}, index=pd.to_timedelta(np.arange(1480), unit="min")
+        {"A": np.arange(1480)},
+        index=pd.to_timedelta(np.arange(1480), unit="min").as_unit("us"),
     )
     result = df.resample("30min").sum()
 
@@ -170,7 +171,7 @@ def test_resample_with_timedelta_yields_no_empty_groups(duplicates):
 
     expected = DataFrame(
         [[768] * 4] * 12 + [[528] * 4],
-        index=timedelta_range(start="1s", periods=13, freq="3s"),
+        index=timedelta_range(start="1s", periods=13, freq="3s", unit="ns"),
     )
     expected.columns = df.columns
     tm.assert_frame_equal(result, expected)
diff --git a/pandas/tests/reshape/concat/test_append_common.py b/pandas/tests/reshape/concat/test_append_common.py
index 1b26b18c56b5a..d47fd3c743f95 100644
--- a/pandas/tests/reshape/concat/test_append_common.py
+++ b/pandas/tests/reshape/concat/test_append_common.py
@@ -31,7 +31,7 @@
         pd.Timestamp("2011-01-02", tz="US/Eastern").as_unit("s"),
         pd.Timestamp("2011-01-03", tz="US/Eastern").as_unit("s"),
     ],
-    "timedelta64[ns]": [
+    "timedelta64[us]": [
        pd.Timedelta("1 days"),
         pd.Timedelta("2 days"),
         pd.Timedelta("3 days"),
diff --git a/pandas/tests/scalar/timedelta/test_arithmetic.py b/pandas/tests/scalar/timedelta/test_arithmetic.py
index 1f6ece9f3e8f1..09bf7ec70d54f 100644
--- a/pandas/tests/scalar/timedelta/test_arithmetic.py
+++ b/pandas/tests/scalar/timedelta/test_arithmetic.py
@@ -440,7 +440,7 @@ def test_td_mul_td64_ndarray_invalid(self):
         msg = (
             "ufunc '?multiply'? cannot use operands with types "
-            rf"dtype\('{tm.ENDIAN}m8\[ns\]'\) and dtype\('{tm.ENDIAN}m8\[ns\]'\)"
+            rf"dtype\('{tm.ENDIAN}m8\[us\]'\) and dtype\('{tm.ENDIAN}m8\[us\]'\)"
         )
         with pytest.raises(TypeError, match=msg):
             td * other
@@ -1225,6 +1225,7 @@ def test_ops_str_deprecated(box):
             "ufunc 'divide' cannot use operands",
             "Invalid dtype object for __floordiv__",
             r"unsupported operand type\(s\) for /: 'int' and 'str'",
+            r"unsupported operand type\(s\) for /: 'datetime.timedelta' and 'str'",
         ]
     )
     with pytest.raises(TypeError, match=msg):
diff --git a/pandas/tests/scalar/timedelta/test_constructors.py b/pandas/tests/scalar/timedelta/test_constructors.py
index 28614c513e8ef..ebf6ed44da8ef 100644
--- a/pandas/tests/scalar/timedelta/test_constructors.py
+++ b/pandas/tests/scalar/timedelta/test_constructors.py
@@ -133,19 +133,19 @@ def test_unit_parser(self, unit, np_unit, wrapper):
             [np.timedelta64(i, np_unit) for i in np.arange(5).tolist()],
             dtype="m8[ns]",
         )
-        # TODO(2.0): the desired output dtype may have non-nano resolution
         result = to_timedelta(wrapper(range(5)), unit=unit)
         tm.assert_index_equal(result, expected)
 
         str_repr = [f"{x}{unit}" for x in np.arange(5)]
+        exp_unit = "us" if np_unit != "ns" else "ns"
         result = to_timedelta(wrapper(str_repr))
-        tm.assert_index_equal(result, expected)
+        tm.assert_index_equal(result, expected.as_unit(exp_unit))
         result = to_timedelta(wrapper(str_repr))
-        tm.assert_index_equal(result, expected)
+        tm.assert_index_equal(result, expected.as_unit(exp_unit))
 
         # scalar
-        expected = Timedelta(np.timedelta64(2, np_unit).astype("timedelta64[ns]"))
+        expected = Timedelta(np.timedelta64(2, np_unit)).as_unit(exp_unit)
         result = to_timedelta(2, unit=unit)
         assert result == expected
         result = Timedelta(2, unit=unit)
@@ -271,12 +271,12 @@ def test_construction():
     expected = np.timedelta64(10, "D").astype("m8[ns]").view("i8")
     assert Timedelta(10, unit="D")._value == expected
     assert Timedelta(10.0, unit="D")._value == expected
-    assert Timedelta("10 days")._value == expected
+    assert Timedelta("10 days")._value == expected // 1000
    assert Timedelta(days=10)._value == expected
     assert Timedelta(days=10.0)._value == expected
 
     expected += np.timedelta64(10, "s").astype("m8[ns]").view("i8")
-    assert Timedelta("10 days 00:00:10")._value == expected
+    assert Timedelta("10 days 00:00:10")._value == expected // 1000
     assert Timedelta(days=10, seconds=10)._value == expected
     assert Timedelta(days=10, milliseconds=10 * 1000)._value == expected
     assert Timedelta(days=10, microseconds=10 * 1000 * 1000)._value == expected
@@ -434,7 +434,7 @@ def test_td_construction_with_np_dtypes(npdtype, item):
 def test_td_from_repr_roundtrip(val):
     # round-trip both for string and value
     td = Timedelta(val)
-    assert Timedelta(td._value) == td
+    assert Timedelta(td.value) == td
     assert Timedelta(str(td)) == td
     assert Timedelta(td._repr_base(format="all")) == td
 
@@ -443,7 +443,7 @@ def test_td_from_repr_roundtrip(val):
 def test_overflow_on_construction():
     # GH#3374
-    value = Timedelta("1day")._value * 20169940
+    value = Timedelta("1day").as_unit("ns")._value * 20169940
     msg = "Cannot cast 1742682816000000000000 from ns to 'ns' without overflow"
     with pytest.raises(OutOfBoundsTimedelta, match=msg):
         Timedelta(value)
@@ -705,3 +705,21 @@ def test_non_nano_value():
     # check that the suggested workaround actually works
     result = td.asm8.view("i8")
     assert result == 86400000000
+
+
+def test_parsed_unit():
+    td = Timedelta("1 Day")
+    assert td.unit == "us"
+
+    td = Timedelta("1 Day 2 hours 3 minutes 4 ns")
+    assert td.unit == "ns"
== "ns" + + td = Timedelta("1 Day 2:03:04.012345") + assert td.unit == "us" + + td = Timedelta("1 Day 2:03:04.012345000") + assert td.unit == "ns" + + # 7 digits after the decimal + td = Timedelta("1 Day 2:03:04.0123450") + assert td.unit == "ns" diff --git a/pandas/tests/scalar/timedelta/test_timedelta.py b/pandas/tests/scalar/timedelta/test_timedelta.py index cf878b1164b3f..b66776ff5e564 100644 --- a/pandas/tests/scalar/timedelta/test_timedelta.py +++ b/pandas/tests/scalar/timedelta/test_timedelta.py @@ -338,6 +338,7 @@ def test_total_seconds_scalar(self): def test_conversion(self): for td in [Timedelta(10, unit="D"), Timedelta("1 days, 10:11:12.012345")]: + td = td.as_unit("ns") pydt = td.to_pytimedelta() assert td == Timedelta(pydt) assert td == pydt @@ -385,8 +386,8 @@ def check(value): assert abs(td) == Timedelta("13:48:48") assert str(td) == "-1 days +10:11:12" assert -td == Timedelta("0 days 13:48:48") - assert -Timedelta("-1 days, 10:11:12")._value == 49728000000000 - assert Timedelta("-1 days, 10:11:12")._value == -49728000000000 + assert -Timedelta("-1 days, 10:11:12")._value == 49728000000 + assert Timedelta("-1 days, 10:11:12")._value == -49728000000 rng = to_timedelta("-1 days, 10:11:12.100123456") assert rng.days == -1 diff --git a/pandas/tests/scalar/timestamp/test_timestamp.py b/pandas/tests/scalar/timestamp/test_timestamp.py index 8e153827ad086..5484b0e3915be 100644 --- a/pandas/tests/scalar/timestamp/test_timestamp.py +++ b/pandas/tests/scalar/timestamp/test_timestamp.py @@ -367,11 +367,11 @@ def test_roundtrip(self): # further test accessors base = Timestamp("20140101 00:00:00").as_unit("ns") - result = Timestamp(base._value + Timedelta("5ms")._value) + result = Timestamp(base._value + Timedelta("5ms").value) assert result == Timestamp(f"{base}.005000") assert result.microsecond == 5000 - result = Timestamp(base._value + Timedelta("5us")._value) + result = Timestamp(base._value + Timedelta("5us").value) assert result == Timestamp(f"{base}.000005") assert result.microsecond == 5 @@ -380,11 +380,11 @@ def test_roundtrip(self): assert result.nanosecond == 5 assert result.microsecond == 0 - result = Timestamp(base._value + Timedelta("6ms 5us")._value) + result = Timestamp(base._value + Timedelta("6ms 5us").value) assert result == Timestamp(f"{base}.006005") assert result.microsecond == 5 + 6 * 1000 - result = Timestamp(base._value + Timedelta("200ms 5us")._value) + result = Timestamp(base._value + Timedelta("200ms 5us").value) assert result == Timestamp(f"{base}.200005") assert result.microsecond == 5 + 200 * 1000 diff --git a/pandas/tests/series/indexing/test_setitem.py b/pandas/tests/series/indexing/test_setitem.py index 35297482703c7..8d35b4b2cfda8 100644 --- a/pandas/tests/series/indexing/test_setitem.py +++ b/pandas/tests/series/indexing/test_setitem.py @@ -1548,7 +1548,7 @@ def raises(self): @pytest.mark.parametrize( "val,exp_dtype,raises", [ - (Timedelta("12 day"), "timedelta64[ns]", False), + (Timedelta("12 day"), "timedelta64[us]", False), (1, object, True), ("x", object, True), ], diff --git a/pandas/tests/series/methods/test_astype.py b/pandas/tests/series/methods/test_astype.py index 11e37bfa8befc..07869d40d85a5 100644 --- a/pandas/tests/series/methods/test_astype.py +++ b/pandas/tests/series/methods/test_astype.py @@ -510,7 +510,7 @@ class TestAstypeString: ([1, None], "UInt16"), (["1/1/2021", "2/1/2021"], "period[M]"), (["1/1/2021", "2/1/2021", NaT], "period[M]"), - (["1 Day", "59 Days", NaT], "timedelta64[ns]"), + (["1 Day", "59 Days", NaT], 
"timedelta64[us]"), # currently no way to parse IntervalArray from a list of strings ], ) diff --git a/pandas/tests/series/methods/test_diff.py b/pandas/tests/series/methods/test_diff.py index ee94e46ae4191..dde51e5ac56de 100644 --- a/pandas/tests/series/methods/test_diff.py +++ b/pandas/tests/series/methods/test_diff.py @@ -68,7 +68,7 @@ def test_diff_dt64(self): def test_diff_dt64tz(self): # with tz ser = Series( - date_range("2000-01-01 09:00:00", periods=5, tz="US/Eastern", unit="ns"), + date_range("2000-01-01 09:00:00", periods=5, tz="US/Eastern"), name="foo", ) result = ser.diff() diff --git a/pandas/tests/series/methods/test_fillna.py b/pandas/tests/series/methods/test_fillna.py index b345840d61823..568bdb260d4d4 100644 --- a/pandas/tests/series/methods/test_fillna.py +++ b/pandas/tests/series/methods/test_fillna.py @@ -611,7 +611,7 @@ def test_fillna_pytimedelta(self): ser = Series([np.nan, Timedelta("1 days")], index=["A", "B"]) result = ser.fillna(timedelta(1)) - expected = Series(Timedelta("1 days"), index=["A", "B"]) + expected = Series(Timedelta("1 days"), index=["A", "B"], dtype="m8[us]") tm.assert_series_equal(result, expected) def test_fillna_period(self): diff --git a/pandas/tests/series/test_arithmetic.py b/pandas/tests/series/test_arithmetic.py index 2bcfd248c1d86..a77e55612e23d 100644 --- a/pandas/tests/series/test_arithmetic.py +++ b/pandas/tests/series/test_arithmetic.py @@ -356,7 +356,7 @@ def test_arithmetic_with_duplicate_index(self): ser = Series(date_range("20130101 09:00:00", periods=5, unit="ns"), index=index) other = Series(date_range("20130101", periods=5, unit="ns"), index=index) result = ser - other - expected = Series(Timedelta("9 hours"), index=[2, 2, 3, 3, 4]) + expected = Series(Timedelta("9 hours"), index=[2, 2, 3, 3, 4], dtype="m8[ns]") tm.assert_series_equal(result, expected) def test_masked_and_non_masked_propagate_na(self): diff --git a/pandas/tests/series/test_formats.py b/pandas/tests/series/test_formats.py index 721270d98b380..76c8914e60b76 100644 --- a/pandas/tests/series/test_formats.py +++ b/pandas/tests/series/test_formats.py @@ -536,7 +536,7 @@ def test_categorical_series_repr_timedelta(self): 3 4 days 4 5 days dtype: category -Categories (5, timedelta64[ns]): [1 days, 2 days, 3 days, 4 days, 5 days]""" +Categories (5, timedelta64[us]): [1 days, 2 days, 3 days, 4 days, 5 days]""" assert repr(s) == exp @@ -553,7 +553,7 @@ def test_categorical_series_repr_timedelta(self): 8 8 days 01:00:00 9 9 days 01:00:00 dtype: category -Categories (10, timedelta64[ns]): [0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, +Categories (10, timedelta64[us]): [0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, ..., 6 days 01:00:00, 7 days 01:00:00, 8 days 01:00:00, 9 days 01:00:00]""" # noqa: E501 @@ -568,7 +568,7 @@ def test_categorical_series_repr_timedelta_ordered(self): 3 4 days 4 5 days dtype: category -Categories (5, timedelta64[ns]): [1 days < 2 days < 3 days < 4 days < 5 days]""" +Categories (5, timedelta64[us]): [1 days < 2 days < 3 days < 4 days < 5 days]""" assert repr(s) == exp @@ -585,7 +585,7 @@ def test_categorical_series_repr_timedelta_ordered(self): 8 8 days 01:00:00 9 9 days 01:00:00 dtype: category -Categories (10, timedelta64[ns]): [0 days 01:00:00 < 1 days 01:00:00 < 2 days 01:00:00 < +Categories (10, timedelta64[us]): [0 days 01:00:00 < 1 days 01:00:00 < 2 days 01:00:00 < 3 days 01:00:00 ... 
diff --git a/pandas/tests/tools/test_to_timedelta.py b/pandas/tests/tools/test_to_timedelta.py
index 16610e2320f5c..878f9ecf79ef1 100644
--- a/pandas/tests/tools/test_to_timedelta.py
+++ b/pandas/tests/tools/test_to_timedelta.py
@@ -27,6 +27,13 @@
 
 
 class TestTimedeltas:
+    def test_to_timedelta_mixed_unit_strings(self):
+        # https://github.com/pandas-dev/pandas/pull/63196#issuecomment-3595743721
+        result = to_timedelta(["1 days 06:05:01.00003", "15.5us"])
+
+        expected = TimedeltaIndex([108_301_000_030_000, 15_500], dtype="m8[ns]")
+        tm.assert_index_equal(result, expected)
+
     def test_to_timedelta_all_nat_unit(self):
         # With all-NaT entries, we get "s" unit
         result = to_timedelta([None])
@@ -84,7 +91,7 @@ def test_to_timedelta_same_np_timedelta64(self):
     def test_to_timedelta_series(self):
         # Series
         expected = Series(
-            [timedelta(days=1), timedelta(days=1, seconds=1)], dtype="m8[ns]"
+            [timedelta(days=1), timedelta(days=1, seconds=1)], dtype="m8[us]"
         )
 
         msg = "'d' is deprecated and will be removed in a future version."
@@ -208,7 +215,7 @@ def test_unambiguous_timedelta_values(self, val, errors):
     def test_to_timedelta_via_apply(self):
         # GH 5458
-        expected = Series([np.timedelta64(1, "s")], dtype="m8[ns]")
+        expected = Series([np.timedelta64(1, "s")], dtype="m8[us]")
         result = Series(["00:00:01"]).apply(to_timedelta)
         tm.assert_series_equal(result, expected)
 
@@ -222,7 +229,7 @@ def test_to_timedelta_inference_without_warning(self):
         with tm.assert_produces_warning(None):
             result = to_timedelta(vals)
-        expected = TimedeltaIndex([pd.Timedelta(seconds=1), pd.NaT])
+        expected = TimedeltaIndex([pd.Timedelta(seconds=1), pd.NaT], dtype="m8[us]")
         tm.assert_index_equal(result, expected)
 
     def test_to_timedelta_on_missing_values(self):
@@ -232,11 +239,11 @@ def test_to_timedelta_on_missing_values(self):
         actual = to_timedelta(Series(["00:00:01", np.nan]))
         expected = Series(
             [np.timedelta64(1000000000, "ns"), timedelta_NaT],
-            dtype=f"{tm.ENDIAN}m8[ns]",
+            dtype=f"{tm.ENDIAN}m8[us]",
         )
         tm.assert_series_equal(actual, expected)
 
-        ser = Series(["00:00:01", pd.NaT], dtype="m8[ns]")
+        ser = Series(["00:00:01", pd.NaT], dtype="m8[us]")
         actual = to_timedelta(ser)
         tm.assert_series_equal(actual, expected)
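For reference, the unit-inference rule these ``Timedelta``/``to_timedelta`` tests encode, stated as observable behavior rather than parser internals: a parsed string stays at microsecond resolution unless it actually carries nanosecond information, i.e. a sub-microsecond remainder, more than six fractional-second digits, or an explicit nanosecond component; array parsing applies the rule per element and keeps the finest resolution any element needs. A small sketch restating the expectations from the tests above (illustrative, run against this branch):

.. code-block:: python

    import pandas as pd

    pd.Timedelta("1 Day").unit                           # "us"
    pd.Timedelta("1 Day 2:03:04.012345").unit            # "us" (6 decimals fit in microseconds)
    pd.Timedelta("1 Day 2:03:04.0123450").unit           # "ns" (7 decimals force nanoseconds)
    pd.Timedelta("1 Day 2 hours 3 minutes 4 ns").unit    # "ns" (explicit nanosecond component)

    # Mixed inputs resolve to the highest resolution required by any element
    pd.to_timedelta(["1 days 06:05:01.00003", "15.5us"]).dtype   # timedelta64[ns]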