diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py
index 094c9c3df0bed..631257b7a5264 100644
--- a/pandas/core/arrays/datetimelike.py
+++ b/pandas/core/arrays/datetimelike.py
@@ -727,6 +727,10 @@ def __add__(self, other):
             else:  # pragma: no cover
                 return NotImplemented
 
+            if is_timedelta64_dtype(result) and isinstance(result, np.ndarray):
+                from pandas.core.arrays import TimedeltaArrayMixin
+                # TODO: infer freq?
+                return TimedeltaArrayMixin(result)
             return result
 
         cls.__add__ = __add__
@@ -791,6 +795,10 @@ def __sub__(self, other):
             else:  # pragma: no cover
                 return NotImplemented
 
+            if is_timedelta64_dtype(result) and isinstance(result, np.ndarray):
+                from pandas.core.arrays import TimedeltaArrayMixin
+                # TODO: infer freq?
+                return TimedeltaArrayMixin(result)
             return result
 
         cls.__sub__ = __sub__
diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py
index d52a97f5e3db4..c56e994e0ca2f 100644
--- a/pandas/core/arrays/datetimes.py
+++ b/pandas/core/arrays/datetimes.py
@@ -222,6 +222,11 @@ def __new__(cls, values, freq=None, tz=None, dtype=None):
         # if dtype has an embedded tz, capture it
         tz = dtl.validate_tz_from_dtype(dtype, tz)
 
+        if is_object_dtype(values):
+            # kludge; dispatch until the DatetimeArray constructor is complete
+            from pandas import DatetimeIndex
+            values = DatetimeIndex(values, freq=freq, tz=tz)
+
         if isinstance(values, ABCSeries):
             # extract to ndarray or DatetimeIndex
             values = values._values
diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py
index ad564ca34930f..cb630c9b66c20 100644
--- a/pandas/core/arrays/timedeltas.py
+++ b/pandas/core/arrays/timedeltas.py
@@ -300,6 +300,11 @@ def _evaluate_with_timedelta_like(self, other, op):
 
         return NotImplemented
 
+    def __neg__(self):
+        if self.freq is not None:
+            return type(self)(-self._data, freq=-self.freq)
+        return type(self)(-self._data)
+
     # ----------------------------------------------------------------
     # Conversion Methods - Vectorized analogues of Timedelta methods
 
diff --git a/pandas/core/ops.py b/pandas/core/ops.py
index 1f422a6098fa0..869a1d6e2fb14 100644
--- a/pandas/core/ops.py
+++ b/pandas/core/ops.py
@@ -947,6 +947,10 @@ def should_series_dispatch(left, right, op):
         # numpy integer dtypes as timedelta64 dtypes in this scenario
         return True
 
+    if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype):
+        # in particular the case where right is an array of DateOffsets
+        return True
+
     return False
 
 
diff --git a/pandas/tests/arithmetic/conftest.py b/pandas/tests/arithmetic/conftest.py
index 63a5c40a31f1d..2714b68fa6ff4 100644
--- a/pandas/tests/arithmetic/conftest.py
+++ b/pandas/tests/arithmetic/conftest.py
@@ -57,14 +57,6 @@ def numeric_idx(request):
     return request.param
 
 
-@pytest.fixture
-def tdser():
-    """
-    Return a Series with dtype='timedelta64[ns]', including a NaT.
- """ - return pd.Series(['59 Days', '59 Days', 'NaT'], dtype='timedelta64[ns]') - - # ------------------------------------------------------------------ # Scalar Fixtures diff --git a/pandas/tests/arithmetic/test_datetime64.py b/pandas/tests/arithmetic/test_datetime64.py index 78af506978443..873c7c92cbaf6 100644 --- a/pandas/tests/arithmetic/test_datetime64.py +++ b/pandas/tests/arithmetic/test_datetime64.py @@ -23,7 +23,6 @@ from pandas import ( Timestamp, Timedelta, Period, Series, date_range, NaT, DatetimeIndex, TimedeltaIndex) -from pandas.core.arrays import DatetimeArrayMixin as DatetimeArray # ------------------------------------------------------------------ @@ -51,16 +50,16 @@ def test_dt64_nat_comparison(self): class TestDatetime64SeriesComparison(object): # TODO: moved from tests.series.test_operators; needs cleanup - def test_comparison_invalid(self, box): + def test_comparison_invalid(self, box_with_array): # GH#4968 # invalid date/int comparisons - xbox = box if box not in [pd.Index, DatetimeArray] else np.ndarray + xbox = box_with_array if box_with_array is not pd.Index else np.ndarray ser = Series(range(5)) ser2 = Series(pd.date_range('20010101', periods=5)) - ser = tm.box_expected(ser, box) - ser2 = tm.box_expected(ser2, box) + ser = tm.box_expected(ser, box_with_array) + ser2 = tm.box_expected(ser2, box_with_array) for (x, y) in [(ser, ser2), (ser2, ser)]: @@ -228,11 +227,10 @@ def test_timestamp_compare_series(self, left, right): def test_dt64arr_timestamp_equality(self, box_with_array): # GH#11034 - box = box_with_array - xbox = box if box not in [pd.Index, DatetimeArray] else np.ndarray + xbox = box_with_array if box_with_array is not pd.Index else np.ndarray ser = pd.Series([pd.Timestamp('2000-01-29 01:59:00'), 'NaT']) - ser = tm.box_expected(ser, box) + ser = tm.box_expected(ser, box_with_array) result = ser != ser expected = tm.box_expected([False, True], xbox) @@ -796,28 +794,28 @@ def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, pd.Timestamp('2013-01-01'), pd.Timestamp('2013-01-01').to_pydatetime(), pd.Timestamp('2013-01-01').to_datetime64()]) - def test_dt64arr_sub_dtscalar(self, box, ts): + def test_dt64arr_sub_dtscalar(self, box_with_array, ts): # GH#8554, GH#22163 DataFrame op should _not_ return dt64 dtype idx = pd.date_range('2013-01-01', periods=3) - idx = tm.box_expected(idx, box) + idx = tm.box_expected(idx, box_with_array) expected = pd.TimedeltaIndex(['0 Days', '1 Day', '2 Days']) - expected = tm.box_expected(expected, box) + expected = tm.box_expected(expected, box_with_array) result = idx - ts tm.assert_equal(result, expected) - def test_dt64arr_sub_datetime64_not_ns(self, box): + def test_dt64arr_sub_datetime64_not_ns(self, box_with_array): # GH#7996, GH#22163 ensure non-nano datetime64 is converted to nano # for DataFrame operation dt64 = np.datetime64('2013-01-01') assert dt64.dtype == 'datetime64[D]' dti = pd.date_range('20130101', periods=3) - dtarr = tm.box_expected(dti, box) + dtarr = tm.box_expected(dti, box_with_array) expected = pd.TimedeltaIndex(['0 Days', '1 Day', '2 Days']) - expected = tm.box_expected(expected, box) + expected = tm.box_expected(expected, box_with_array) result = dtarr - dt64 tm.assert_equal(result, expected) @@ -825,38 +823,39 @@ def test_dt64arr_sub_datetime64_not_ns(self, box): result = dt64 - dtarr tm.assert_equal(result, -expected) - def test_dt64arr_sub_timestamp(self, box): + def test_dt64arr_sub_timestamp(self, box_with_array): ser = pd.date_range('2014-03-17', periods=2, freq='D', tz='US/Eastern') ts 
         ts = ser[0]
 
         # FIXME: transpose raises ValueError
-        ser = tm.box_expected(ser, box, transpose=False)
+        ser = tm.box_expected(ser, box_with_array, transpose=False)
 
         delta_series = pd.Series([np.timedelta64(0, 'D'),
                                   np.timedelta64(1, 'D')])
-        expected = tm.box_expected(delta_series, box, transpose=False)
+        expected = tm.box_expected(delta_series, box_with_array,
+                                   transpose=False)
 
         tm.assert_equal(ser - ts, expected)
         tm.assert_equal(ts - ser, -expected)
 
-    def test_dt64arr_sub_NaT(self, box):
+    def test_dt64arr_sub_NaT(self, box_with_array):
         # GH#18808
         dti = pd.DatetimeIndex([pd.NaT, pd.Timestamp('19900315')])
-        ser = tm.box_expected(dti, box, transpose=False)
+        ser = tm.box_expected(dti, box_with_array, transpose=False)
 
         result = ser - pd.NaT
         expected = pd.Series([pd.NaT, pd.NaT], dtype='timedelta64[ns]')
         # FIXME: raises ValueError with transpose
-        expected = tm.box_expected(expected, box, transpose=False)
+        expected = tm.box_expected(expected, box_with_array, transpose=False)
         tm.assert_equal(result, expected)
 
         dti_tz = dti.tz_localize('Asia/Tokyo')
-        ser_tz = tm.box_expected(dti_tz, box, transpose=False)
+        ser_tz = tm.box_expected(dti_tz, box_with_array, transpose=False)
 
         result = ser_tz - pd.NaT
         expected = pd.Series([pd.NaT, pd.NaT], dtype='timedelta64[ns]')
-        expected = tm.box_expected(expected, box, transpose=False)
+        expected = tm.box_expected(expected, box_with_array, transpose=False)
         tm.assert_equal(result, expected)
 
     # -------------------------------------------------------------
@@ -976,6 +975,196 @@ def test_dt64arr_add_sub_period_scalar(self, dti_freq, box_with_array):
             per - dtarr
 
 
+class TestDatetime64DateOffsetArithmetic(object):
+
+    # -------------------------------------------------------------
+    # Tick DateOffsets
+
+    # TODO: parametrize over timezone?
+    def test_dt64arr_series_add_tick_DateOffset(self, box_with_array):
+        # GH#4532
+        # operate with pd.offsets
+        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
+        expected = Series([Timestamp('20130101 9:01:05'),
+                           Timestamp('20130101 9:02:05')])
+
+        ser = tm.box_expected(ser, box_with_array)
+        expected = tm.box_expected(expected, box_with_array)
+
+        result = ser + pd.offsets.Second(5)
+        tm.assert_equal(result, expected)
+
+        result2 = pd.offsets.Second(5) + ser
+        tm.assert_equal(result2, expected)
+
+    def test_dt64arr_series_sub_tick_DateOffset(self, box_with_array):
+        # GH#4532
+        # operate with pd.offsets
+        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
+        expected = Series([Timestamp('20130101 9:00:55'),
+                           Timestamp('20130101 9:01:55')])
+
+        ser = tm.box_expected(ser, box_with_array)
+        expected = tm.box_expected(expected, box_with_array)
+
+        result = ser - pd.offsets.Second(5)
+        tm.assert_equal(result, expected)
+
+        result2 = -pd.offsets.Second(5) + ser
+        tm.assert_equal(result2, expected)
+
+        with pytest.raises(TypeError):
+            pd.offsets.Second(5) - ser
+
+    @pytest.mark.parametrize('cls_name', ['Day', 'Hour', 'Minute', 'Second',
+                                          'Milli', 'Micro', 'Nano'])
+    def test_dt64arr_add_sub_tick_DateOffset_smoke(self, cls_name,
+                                                   box_with_array):
+        # GH#4532
+        # smoke tests for valid DateOffsets
+        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
+        ser = tm.box_expected(ser, box_with_array)
+
+        offset_cls = getattr(pd.offsets, cls_name)
+        ser + offset_cls(5)
+        offset_cls(5) + ser
+        ser - offset_cls(5)
+
+    def test_dti_add_tick_tzaware(self, tz_aware_fixture, box_with_array):
+        # GH#21610, GH#22163 ensure DataFrame doesn't return object-dtype
+        tz = tz_aware_fixture
+        if tz == 'US/Pacific':
+            dates = date_range('2012-11-01', periods=3, tz=tz)
+            offset = dates + pd.offsets.Hour(5)
+            assert dates[0] + pd.offsets.Hour(5) == offset[0]
+
+        dates = date_range('2010-11-01 00:00',
+                           periods=3, tz=tz, freq='H')
+        expected = DatetimeIndex(['2010-11-01 05:00', '2010-11-01 06:00',
+                                  '2010-11-01 07:00'], freq='H', tz=tz)
+
+        # FIXME: these raise ValueError with transpose=True
+        dates = tm.box_expected(dates, box_with_array, transpose=False)
+        expected = tm.box_expected(expected, box_with_array, transpose=False)
+
+        # TODO: parametrize over the scalar being added? radd? sub?
+        offset = dates + pd.offsets.Hour(5)
+        tm.assert_equal(offset, expected)
+        offset = dates + np.timedelta64(5, 'h')
+        tm.assert_equal(offset, expected)
+        offset = dates + timedelta(hours=5)
+        tm.assert_equal(offset, expected)
+
+    # -------------------------------------------------------------
+    # RelativeDelta DateOffsets
+
+    # -------------------------------------------------------------
+    # Non-Tick, Non-RelativeDelta DateOffsets
+
+
+class TestDatetime64OverflowHandling(object):
+    # TODO: box + de-duplicate
+
+    def test_dt64_series_arith_overflow(self):
+        # GH#12534, fixed by GH#19024
+        dt = pd.Timestamp('1700-01-31')
+        td = pd.Timedelta('20000 Days')
+        dti = pd.date_range('1949-09-30', freq='100Y', periods=4)
+        ser = pd.Series(dti)
+        with pytest.raises(OverflowError):
+            ser - dt
+        with pytest.raises(OverflowError):
+            dt - ser
+        with pytest.raises(OverflowError):
+            ser + td
+        with pytest.raises(OverflowError):
+            td + ser
+
+        ser.iloc[-1] = pd.NaT
+        expected = pd.Series(['2004-10-03', '2104-10-04', '2204-10-04', 'NaT'],
+                             dtype='datetime64[ns]')
+        res = ser + td
+        tm.assert_series_equal(res, expected)
+        res = td + ser
+        tm.assert_series_equal(res, expected)
+
+        ser.iloc[1:] = pd.NaT
+        expected = pd.Series(['91279 Days', 'NaT', 'NaT', 'NaT'],
+                             dtype='timedelta64[ns]')
+        res = ser - dt
+        tm.assert_series_equal(res, expected)
+        res = dt - ser
+        tm.assert_series_equal(res, -expected)
+
+    def test_datetimeindex_sub_timestamp_overflow(self):
+        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
+        dtimin = pd.to_datetime(['now', pd.Timestamp.min])
+
+        tsneg = Timestamp('1950-01-01')
+        ts_neg_variants = [tsneg,
+                           tsneg.to_pydatetime(),
+                           tsneg.to_datetime64().astype('datetime64[ns]'),
+                           tsneg.to_datetime64().astype('datetime64[D]')]
+
+        tspos = Timestamp('1980-01-01')
+        ts_pos_variants = [tspos,
+                           tspos.to_pydatetime(),
+                           tspos.to_datetime64().astype('datetime64[ns]'),
+                           tspos.to_datetime64().astype('datetime64[D]')]
+
+        for variant in ts_neg_variants:
+            with pytest.raises(OverflowError):
+                dtimax - variant
+
+        expected = pd.Timestamp.max.value - tspos.value
+        for variant in ts_pos_variants:
+            res = dtimax - variant
+            assert res[1].value == expected
+
+        expected = pd.Timestamp.min.value - tsneg.value
+        for variant in ts_neg_variants:
+            res = dtimin - variant
+            assert res[1].value == expected
+
+        for variant in ts_pos_variants:
+            with pytest.raises(OverflowError):
+                dtimin - variant
+
+    def test_datetimeindex_sub_datetimeindex_overflow(self):
+        # GH#22492, GH#22508
+        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
+        dtimin = pd.to_datetime(['now', pd.Timestamp.min])
+
+        ts_neg = pd.to_datetime(['1950-01-01', '1950-01-01'])
+        ts_pos = pd.to_datetime(['1980-01-01', '1980-01-01'])
+
+        # General tests
+        expected = pd.Timestamp.max.value - ts_pos[1].value
+        result = dtimax - ts_pos
+        assert result[1].value == expected
+
+        expected = pd.Timestamp.min.value - ts_neg[1].value
+        result = dtimin - ts_neg
+        assert result[1].value == expected
+
+        with pytest.raises(OverflowError):
+            dtimax - ts_neg
+
+        with pytest.raises(OverflowError):
+            dtimin - ts_pos
+
+        # Edge cases
+        tmin = pd.to_datetime([pd.Timestamp.min])
+        t1 = tmin + pd.Timedelta.max + pd.Timedelta('1us')
+        with pytest.raises(OverflowError):
+            t1 - tmin
+
+        tmax = pd.to_datetime([pd.Timestamp.max])
+        t2 = tmax + pd.Timedelta.min - pd.Timedelta('1us')
+        with pytest.raises(OverflowError):
+            tmax - t2
+
+
 class TestTimestampSeriesArithmetic(object):
 
     def test_dt64ser_sub_datetime_dtype(self):
@@ -1065,52 +1254,6 @@ def test_sub_datetime_compat(self):
         tm.assert_series_equal(s - dt, exp)
         tm.assert_series_equal(s - Timestamp(dt), exp)
 
-    def test_dt64_series_add_tick_DateOffset(self, box_with_array):
-        # GH#4532
-        # operate with pd.offsets
-        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
-        expected = Series([Timestamp('20130101 9:01:05'),
-                           Timestamp('20130101 9:02:05')])
-
-        ser = tm.box_expected(ser, box_with_array)
-        expected = tm.box_expected(expected, box_with_array)
-
-        result = ser + pd.offsets.Second(5)
-        tm.assert_equal(result, expected)
-
-        result2 = pd.offsets.Second(5) + ser
-        tm.assert_equal(result2, expected)
-
-    def test_dt64_series_sub_tick_DateOffset(self, box_with_array):
-        # GH#4532
-        # operate with pd.offsets
-        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
-        expected = Series([Timestamp('20130101 9:00:55'),
-                           Timestamp('20130101 9:01:55')])
-
-        ser = tm.box_expected(ser, box_with_array)
-        expected = tm.box_expected(expected, box_with_array)
-
-        result = ser - pd.offsets.Second(5)
-        tm.assert_equal(result, expected)
-
-        result2 = -pd.offsets.Second(5) + ser
-        tm.assert_equal(result2, expected)
-
-        with pytest.raises(TypeError):
-            pd.offsets.Second(5) - ser
-
-    @pytest.mark.parametrize('cls_name', ['Day', 'Hour', 'Minute', 'Second',
-                                          'Milli', 'Micro', 'Nano'])
-    def test_dt64_series_add_tick_DateOffset_smoke(self, cls_name):
-        # GH#4532
-        # smoke tests for valid DateOffsets
-        ser = Series([Timestamp('20130101 9:01'), Timestamp('20130101 9:02')])
-
-        offset_cls = getattr(pd.offsets, cls_name)
-        ser + offset_cls(5)
-        offset_cls(5) + ser
-
     def test_dt64_series_add_mixed_tick_DateOffset(self):
         # GH#4532
         # operate with pd.offsets
@@ -1128,37 +1271,6 @@ def test_dt64_series_add_mixed_tick_DateOffset(self):
                                Timestamp('20130101 9:07:00.005')])
         tm.assert_series_equal(result, expected)
 
-    def test_dt64_series_arith_overflow(self):
-        # GH#12534, fixed by GH#19024
-        dt = pd.Timestamp('1700-01-31')
-        td = pd.Timedelta('20000 Days')
-        dti = pd.date_range('1949-09-30', freq='100Y', periods=4)
-        ser = pd.Series(dti)
-        with pytest.raises(OverflowError):
-            ser - dt
-        with pytest.raises(OverflowError):
-            dt - ser
-        with pytest.raises(OverflowError):
-            ser + td
-        with pytest.raises(OverflowError):
-            td + ser
-
-        ser.iloc[-1] = pd.NaT
-        expected = pd.Series(['2004-10-03', '2104-10-04', '2204-10-04', 'NaT'],
-                             dtype='datetime64[ns]')
-        res = ser + td
-        tm.assert_series_equal(res, expected)
-        res = td + ser
-        tm.assert_series_equal(res, expected)
-
-        ser.iloc[1:] = pd.NaT
-        expected = pd.Series(['91279 Days', 'NaT', 'NaT', 'NaT'],
-                             dtype='timedelta64[ns]')
-        res = ser - dt
-        tm.assert_series_equal(res, expected)
-        res = dt - ser
-        tm.assert_series_equal(res, -expected)
-
     def test_datetime64_ops_nat(self):
         # GH#11349
         datetime_series = Series([NaT, Timestamp('19900315')])
@@ -1344,11 +1456,11 @@ def test_dti_isub_int(self, tz_naive_fixture, one):
     # __add__/__sub__ with integer arrays
 
     @pytest.mark.parametrize('freq', ['H', 'D'])
-    @pytest.mark.parametrize('box', [np.array, pd.Index])
-    def test_dti_add_intarray_tick(self, box, freq):
+    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
+    def test_dti_add_intarray_tick(self, int_holder, freq):
         # GH#19959
         dti = pd.date_range('2016-01-01', periods=2, freq=freq)
-        other = box([4, -1])
+        other = int_holder([4, -1])
         with tm.assert_produces_warning(FutureWarning,
                                         check_stacklevel=False):
             expected = DatetimeIndex([dti[n] + other[n]
@@ -1361,11 +1473,11 @@ def test_dti_add_intarray_tick(self, box, freq):
         tm.assert_index_equal(result, expected)
 
     @pytest.mark.parametrize('freq', ['W', 'M', 'MS', 'Q'])
-    @pytest.mark.parametrize('box', [np.array, pd.Index])
-    def test_dti_add_intarray_non_tick(self, box, freq):
+    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
+    def test_dti_add_intarray_non_tick(self, int_holder, freq):
         # GH#19959
         dti = pd.date_range('2016-01-01', periods=2, freq=freq)
-        other = box([4, -1])
+        other = int_holder([4, -1])
         with tm.assert_produces_warning(FutureWarning,
                                         check_stacklevel=False):
             expected = DatetimeIndex([dti[n] + other[n]
@@ -1384,11 +1496,11 @@ def test_dti_add_intarray_non_tick(self, box, freq):
             result = other + dti
         tm.assert_index_equal(result, expected)
 
-    @pytest.mark.parametrize('box', [np.array, pd.Index])
-    def test_dti_add_intarray_no_freq(self, box):
+    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
+    def test_dti_add_intarray_no_freq(self, int_holder):
         # GH#19959
         dti = pd.DatetimeIndex(['2016-01-01', 'NaT', '2017-04-05 06:07:08'])
-        other = box([9, 4, -1])
+        other = int_holder([9, 4, -1])
         with pytest.raises(NullFrequencyError):
             dti + other
         with pytest.raises(NullFrequencyError):
@@ -1684,74 +1796,6 @@ def test_ufunc_coercions(self):
         tm.assert_index_equal(result, exp)
         assert result.freq == 'D'
 
-    def test_datetimeindex_sub_timestamp_overflow(self):
-        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
-        dtimin = pd.to_datetime(['now', pd.Timestamp.min])
-
-        tsneg = Timestamp('1950-01-01')
-        ts_neg_variants = [tsneg,
-                           tsneg.to_pydatetime(),
-                           tsneg.to_datetime64().astype('datetime64[ns]'),
-                           tsneg.to_datetime64().astype('datetime64[D]')]
-
-        tspos = Timestamp('1980-01-01')
-        ts_pos_variants = [tspos,
-                           tspos.to_pydatetime(),
-                           tspos.to_datetime64().astype('datetime64[ns]'),
-                           tspos.to_datetime64().astype('datetime64[D]')]
-
-        for variant in ts_neg_variants:
-            with pytest.raises(OverflowError):
-                dtimax - variant
-
-        expected = pd.Timestamp.max.value - tspos.value
-        for variant in ts_pos_variants:
-            res = dtimax - variant
-            assert res[1].value == expected
-
-        expected = pd.Timestamp.min.value - tsneg.value
-        for variant in ts_neg_variants:
-            res = dtimin - variant
-            assert res[1].value == expected
-
-        for variant in ts_pos_variants:
-            with pytest.raises(OverflowError):
-                dtimin - variant
-
-    def test_datetimeindex_sub_datetimeindex_overflow(self):
-        # GH#22492, GH#22508
-        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
-        dtimin = pd.to_datetime(['now', pd.Timestamp.min])
-
-        ts_neg = pd.to_datetime(['1950-01-01', '1950-01-01'])
-        ts_pos = pd.to_datetime(['1980-01-01', '1980-01-01'])
-
-        # General tests
-        expected = pd.Timestamp.max.value - ts_pos[1].value
-        result = dtimax - ts_pos
-        assert result[1].value == expected
-
-        expected = pd.Timestamp.min.value - ts_neg[1].value
-        result = dtimin - ts_neg
-        assert result[1].value == expected
-
-        with pytest.raises(OverflowError):
-            dtimax - ts_neg
-
-        with pytest.raises(OverflowError):
-            dtimin - ts_pos
-
-        # Edge cases
-        tmin = pd.to_datetime([pd.Timestamp.min])
-        t1 = tmin + pd.Timedelta.max + pd.Timedelta('1us')
-        with pytest.raises(OverflowError):
-            t1 - tmin
-
-        tmax = pd.to_datetime([pd.Timestamp.max])
-        t2 = tmax + pd.Timedelta.min - pd.Timedelta('1us')
-        with pytest.raises(OverflowError):
-            tmax - t2
-
     @pytest.mark.parametrize('names', [('foo', None, None),
                                        ('baz', 'bar', None),
                                        ('bar', 'bar', 'bar')])
@@ -1880,77 +1924,77 @@ def test_dti_with_offset_series(self, tz_naive_fixture, names):
         res3 = dti - other
         tm.assert_series_equal(res3, expected_sub)
 
-    def test_dti_add_offset_tzaware(self, tz_aware_fixture, box_with_array):
-        # GH#21610, GH#22163 ensure DataFrame doesn't return object-dtype
-        timezone = tz_aware_fixture
-        if timezone == 'US/Pacific':
-            dates = date_range('2012-11-01', periods=3, tz=timezone)
-            offset = dates + pd.offsets.Hour(5)
-            assert dates[0] + pd.offsets.Hour(5) == offset[0]
-
-        dates = date_range('2010-11-01 00:00',
-                           periods=3, tz=timezone, freq='H')
-        expected = DatetimeIndex(['2010-11-01 05:00', '2010-11-01 06:00',
-                                  '2010-11-01 07:00'], freq='H', tz=timezone)
-
-        # FIXME: these raise ValueError with transpose=True
-        dates = tm.box_expected(dates, box_with_array, transpose=False)
-        expected = tm.box_expected(expected, box_with_array, transpose=False)
-        # TODO: parametrize over the scalar being added? radd? sub?
-        offset = dates + pd.offsets.Hour(5)
-        tm.assert_equal(offset, expected)
-        offset = dates + np.timedelta64(5, 'h')
-        tm.assert_equal(offset, expected)
-        offset = dates + timedelta(hours=5)
-        tm.assert_equal(offset, expected)
-
-
-@pytest.mark.parametrize('klass', [Series, DatetimeIndex])
-def test_dt64_with_offset_array(klass):
+def test_dt64_with_offset_array(box_with_array):
     # GH#10699
     # array of offsets
-    box = Series if klass is Series else pd.Index
-
-    s = klass([Timestamp('2000-1-1'), Timestamp('2000-2-1')])
+    s = DatetimeIndex([Timestamp('2000-1-1'), Timestamp('2000-2-1')])
+    s = tm.box_expected(s, box_with_array)
 
-    with tm.assert_produces_warning(PerformanceWarning,
+    warn = PerformanceWarning if box_with_array is not pd.DataFrame else None
+    with tm.assert_produces_warning(warn,
                                     clear=[pd.core.arrays.datetimelike]):
-        result = s + box([pd.offsets.DateOffset(years=1),
+        other = pd.Index([pd.offsets.DateOffset(years=1),
                           pd.offsets.MonthEnd()])
-    exp = klass([Timestamp('2001-1-1'), Timestamp('2000-2-29')])
+        other = tm.box_expected(other, box_with_array)
+        result = s + other
+    exp = DatetimeIndex([Timestamp('2001-1-1'), Timestamp('2000-2-29')])
+    exp = tm.box_expected(exp, box_with_array)
     tm.assert_equal(result, exp)
 
     # same offset
-    result = s + box([pd.offsets.DateOffset(years=1),
+    other = pd.Index([pd.offsets.DateOffset(years=1),
                       pd.offsets.DateOffset(years=1)])
-    exp = klass([Timestamp('2001-1-1'), Timestamp('2001-2-1')])
+    other = tm.box_expected(other, box_with_array)
+    result = s + other
+    exp = DatetimeIndex([Timestamp('2001-1-1'), Timestamp('2001-2-1')])
+    exp = tm.box_expected(exp, box_with_array)
     tm.assert_equal(result, exp)
 
 
-@pytest.mark.parametrize('klass', [Series, DatetimeIndex])
-def test_dt64_with_DateOffsets_relativedelta(klass):
+def test_dt64_with_DateOffsets_relativedelta(box_with_array):
     # GH#10699
-    vec = klass([Timestamp('2000-01-05 00:15:00'),
-                 Timestamp('2000-01-31 00:23:00'),
-                 Timestamp('2000-01-01'),
-                 Timestamp('2000-03-31'),
-                 Timestamp('2000-02-29'),
-                 Timestamp('2000-12-31'),
-                 Timestamp('2000-05-15'),
-                 Timestamp('2001-06-15')])
+    if box_with_array is tm.to_array:
+        pytest.xfail("apply_index implementations are Index-specific")
+
+    vec = DatetimeIndex([Timestamp('2000-01-05 00:15:00'),
+                         Timestamp('2000-01-31 00:23:00'),
+                         Timestamp('2000-01-01'),
+                         Timestamp('2000-03-31'),
+                         Timestamp('2000-02-29'),
+                         Timestamp('2000-12-31'),
+                         Timestamp('2000-05-15'),
+                         Timestamp('2001-06-15')])
+    vec = tm.box_expected(vec, box_with_array)
+    vec_items = vec.squeeze() if box_with_array is pd.DataFrame else vec
 
     # DateOffset relativedelta fastpath
     relative_kwargs = [('years', 2), ('months', 5), ('days', 3),
                        ('hours', 5), ('minutes', 10), ('seconds', 2),
                        ('microseconds', 5)]
 
     for i, kwd in enumerate(relative_kwargs):
-        op = pd.DateOffset(**dict([kwd]))
-        tm.assert_equal(klass([x + op for x in vec]), vec + op)
-        tm.assert_equal(klass([x - op for x in vec]), vec - op)
-        op = pd.DateOffset(**dict(relative_kwargs[:i + 1]))
-        tm.assert_equal(klass([x + op for x in vec]), vec + op)
-        tm.assert_equal(klass([x - op for x in vec]), vec - op)
+        off = pd.DateOffset(**dict([kwd]))
+
+        expected = DatetimeIndex([x + off for x in vec_items])
+        expected = tm.box_expected(expected, box_with_array)
+        tm.assert_equal(expected, vec + off)
+
+        expected = DatetimeIndex([x - off for x in vec_items])
+        expected = tm.box_expected(expected, box_with_array)
+        tm.assert_equal(expected, vec - off)
+
+        off = pd.DateOffset(**dict(relative_kwargs[:i + 1]))
+
+        expected = DatetimeIndex([x + off for x in vec_items])
+        expected = tm.box_expected(expected, box_with_array)
+        tm.assert_equal(expected, vec + off)
+
+        expected = DatetimeIndex([x - off for x in vec_items])
+        expected = tm.box_expected(expected, box_with_array)
+        tm.assert_equal(expected, vec - off)
+
+    with pytest.raises(TypeError):
+        off - vec
 
 
 @pytest.mark.parametrize('cls_and_kwargs', [
@@ -1974,18 +2018,22 @@ def test_dt64_with_DateOffsets_relativedelta(klass):
     'Easter', ('DateOffset', {'day': 4}),
     ('DateOffset', {'month': 5})])
 @pytest.mark.parametrize('normalize', [True, False])
-@pytest.mark.parametrize('klass', [Series, DatetimeIndex])
-def test_dt64_with_DateOffsets(klass, normalize, cls_and_kwargs):
+def test_dt64_with_DateOffsets(box_with_array, normalize, cls_and_kwargs):
     # GH#10699
     # assert these are equal on a piecewise basis
-    vec = klass([Timestamp('2000-01-05 00:15:00'),
-                 Timestamp('2000-01-31 00:23:00'),
-                 Timestamp('2000-01-01'),
-                 Timestamp('2000-03-31'),
-                 Timestamp('2000-02-29'),
-                 Timestamp('2000-12-31'),
-                 Timestamp('2000-05-15'),
-                 Timestamp('2001-06-15')])
+    if box_with_array is tm.to_array:
+        pytest.xfail("apply_index implementations are Index-specific")
+
+    vec = DatetimeIndex([Timestamp('2000-01-05 00:15:00'),
+                         Timestamp('2000-01-31 00:23:00'),
+                         Timestamp('2000-01-01'),
+                         Timestamp('2000-03-31'),
+                         Timestamp('2000-02-29'),
+                         Timestamp('2000-12-31'),
+                         Timestamp('2000-05-15'),
+                         Timestamp('2001-06-15')])
+    vec = tm.box_expected(vec, box_with_array)
+    vec_items = vec.squeeze() if box_with_array is pd.DataFrame else vec
 
     if isinstance(cls_and_kwargs, tuple):
         # If cls_name param is a tuple, then 2nd entry is kwargs for
@@ -2009,40 +2057,62 @@ def test_dt64_with_DateOffsets(klass, normalize, cls_and_kwargs):
             continue
 
         offset = offset_cls(n, normalize=normalize, **kwargs)
-        tm.assert_equal(klass([x + offset for x in vec]), vec + offset)
-        tm.assert_equal(klass([x - offset for x in vec]), vec - offset)
-        tm.assert_equal(klass([offset + x for x in vec]), offset + vec)
+        expected = DatetimeIndex([x + offset for x in vec_items])
+        expected = tm.box_expected(expected, box_with_array)
+        tm.assert_equal(expected, vec + offset)
+
+        expected = DatetimeIndex([x - offset for x in vec_items])
+        expected = tm.box_expected(expected, box_with_array)
+        tm.assert_equal(expected, vec - offset)
 
-@pytest.mark.parametrize('klass', [Series, DatetimeIndex])
-def test_datetime64_with_DateOffset(klass):
+        expected = DatetimeIndex([offset + x for x in vec_items])
+        expected = tm.box_expected(expected, box_with_array)
+        tm.assert_equal(expected, offset + vec)
+
+        with pytest.raises(TypeError):
+            offset - vec
+
+
+def test_datetime64_with_DateOffset(box_with_array):
     # GH#10699
-    s = klass(date_range('2000-01-01', '2000-01-31'), name='a')
+    if box_with_array is tm.to_array:
+        pytest.xfail("DateOffset.apply_index uses _shallow_copy")
+
+    s = date_range('2000-01-01', '2000-01-31', name='a')
+    s = tm.box_expected(s, box_with_array)
     result = s + pd.DateOffset(years=1)
     result2 = pd.DateOffset(years=1) + s
-    exp = klass(date_range('2001-01-01', '2001-01-31'), name='a')
+    exp = date_range('2001-01-01', '2001-01-31', name='a')
+    exp = tm.box_expected(exp, box_with_array)
     tm.assert_equal(result, exp)
     tm.assert_equal(result2, exp)
 
     result = s - pd.DateOffset(years=1)
-    exp = klass(date_range('1999-01-01', '1999-01-31'), name='a')
+    exp = date_range('1999-01-01', '1999-01-31', name='a')
+    exp = tm.box_expected(exp, box_with_array)
    tm.assert_equal(result, exp)
 
-    s = klass([Timestamp('2000-01-15 00:15:00', tz='US/Central'),
-               pd.Timestamp('2000-02-15', tz='US/Central')], name='a')
+    s = DatetimeIndex([Timestamp('2000-01-15 00:15:00', tz='US/Central'),
+                       Timestamp('2000-02-15', tz='US/Central')], name='a')
+    # FIXME: ValueError with tzaware DataFrame transpose
+    s = tm.box_expected(s, box_with_array, transpose=False)
     result = s + pd.offsets.Day()
     result2 = pd.offsets.Day() + s
-    exp = klass([Timestamp('2000-01-16 00:15:00', tz='US/Central'),
-                 Timestamp('2000-02-16', tz='US/Central')], name='a')
+    exp = DatetimeIndex([Timestamp('2000-01-16 00:15:00', tz='US/Central'),
+                         Timestamp('2000-02-16', tz='US/Central')], name='a')
+    exp = tm.box_expected(exp, box_with_array, transpose=False)
     tm.assert_equal(result, exp)
     tm.assert_equal(result2, exp)
 
-    s = klass([Timestamp('2000-01-15 00:15:00', tz='US/Central'),
-               pd.Timestamp('2000-02-15', tz='US/Central')], name='a')
+    s = DatetimeIndex([Timestamp('2000-01-15 00:15:00', tz='US/Central'),
+                       Timestamp('2000-02-15', tz='US/Central')], name='a')
+    s = tm.box_expected(s, box_with_array, transpose=False)
     result = s + pd.offsets.MonthEnd()
     result2 = pd.offsets.MonthEnd() + s
-    exp = klass([Timestamp('2000-01-31 00:15:00', tz='US/Central'),
-                 Timestamp('2000-02-29', tz='US/Central')], name='a')
+    exp = DatetimeIndex([Timestamp('2000-01-31 00:15:00', tz='US/Central'),
+                         Timestamp('2000-02-29', tz='US/Central')], name='a')
+    exp = tm.box_expected(exp, box_with_array, transpose=False)
     tm.assert_equal(result, exp)
     tm.assert_equal(result2, exp)
diff --git a/pandas/tests/arithmetic/test_period.py b/pandas/tests/arithmetic/test_period.py
index af3aeaaff0479..7158eae376ba6 100644
--- a/pandas/tests/arithmetic/test_period.py
+++ b/pandas/tests/arithmetic/test_period.py
@@ -30,15 +30,15 @@ def test_pi_cmp_period(self):
         tm.assert_numpy_array_equal(result, exp)
 
     # TODO: moved from test_datetime64; de-duplicate with version below
-    def test_parr_cmp_period_scalar2(self, box):
-        xbox = box if box is not pd.Index else np.ndarray
+    def test_parr_cmp_period_scalar2(self, box_with_array):
+        xbox = box_with_array if box_with_array is not pd.Index else np.ndarray
 
         pi = pd.period_range('2000-01-01', periods=10, freq='D')
 
         val = Period('2000-01-04', freq='D')
         expected = [x > val for x in pi]
 
-        ser = tm.box_expected(pi, box)
+        ser = tm.box_expected(pi, box_with_array)
         expected = tm.box_expected(expected, xbox)
         result = ser > val
         tm.assert_equal(result, expected)
@@ -50,13 +50,13 @@ def test_parr_cmp_period_scalar2(self, box):
         tm.assert_equal(result, expected)
 
     @pytest.mark.parametrize('freq', ['M', '2M', '3M'])
-    def test_parr_cmp_period_scalar(self, freq, box):
+    def test_parr_cmp_period_scalar(self, freq, box_with_array):
         # GH#13200
-        xbox = np.ndarray if box is pd.Index else box
+        xbox = np.ndarray if box_with_array is pd.Index else box_with_array
 
         base = PeriodIndex(['2011-01', '2011-02', '2011-03', '2011-04'],
                            freq=freq)
-        base = tm.box_expected(base, box)
+        base = tm.box_expected(base, box_with_array)
 
         per = Period('2011-02', freq=freq)
 
         exp = np.array([False, True, False, False])
@@ -90,13 +90,13 @@ def test_parr_cmp_period_scalar(self, freq, box):
         tm.assert_equal(per >= base, exp)
 
     @pytest.mark.parametrize('freq', ['M', '2M', '3M'])
-    def test_parr_cmp_pi(self, freq, box):
+    def test_parr_cmp_pi(self, freq, box_with_array):
         # GH#13200
-        xbox = np.ndarray if box is pd.Index else box
+        xbox = np.ndarray if box_with_array is pd.Index else box_with_array
 
         base = PeriodIndex(['2011-01', '2011-02', '2011-03', '2011-04'],
                            freq=freq)
-        base = tm.box_expected(base, box)
+        base = tm.box_expected(base, box_with_array)
 
         # TODO: could also box idx?
         idx = PeriodIndex(['2011-02', '2011-01', '2011-03', '2011-05'],
@@ -127,12 +127,12 @@ def test_parr_cmp_pi(self, freq, box):
         tm.assert_equal(base <= idx, exp)
 
     @pytest.mark.parametrize('freq', ['M', '2M', '3M'])
-    def test_parr_cmp_pi_mismatched_freq_raises(self, freq, box):
+    def test_parr_cmp_pi_mismatched_freq_raises(self, freq, box_with_array):
         # GH#13200
         # different base freq
         base = PeriodIndex(['2011-01', '2011-02', '2011-03', '2011-04'],
                            freq=freq)
-        base = tm.box_expected(base, box)
+        base = tm.box_expected(base, box_with_array)
 
         msg = "Input has different freq=A-DEC from "
         with pytest.raises(IncompatibleFrequency, match=msg):
@@ -598,14 +598,12 @@ def test_pi_add_offset_n_gt1(self, box_transpose_fail):
 
     def test_pi_add_offset_n_gt1_not_divisible(self, box_with_array):
         # GH#23215
         # PeriodIndex with freq.n > 1 add offset with offset.n % freq.n != 0
-        box = box_with_array
-
         pi = pd.PeriodIndex(['2016-01'], freq='2M')
         expected = pd.PeriodIndex(['2016-04'], freq='2M')
 
         # FIXME: with transposing these tests fail
-        pi = tm.box_expected(pi, box, transpose=False)
-        expected = tm.box_expected(expected, box, transpose=False)
+        pi = tm.box_expected(pi, box_with_array, transpose=False)
+        expected = tm.box_expected(expected, box_with_array, transpose=False)
 
         result = pi + to_offset('3M')
         tm.assert_equal(result, expected)
@@ -616,23 +614,23 @@ def test_pi_add_offset_n_gt1_not_divisible(self, box_with_array):
     # ---------------------------------------------------------------
     # __add__/__sub__ with integer arrays
 
-    @pytest.mark.parametrize('box', [np.array, pd.Index])
+    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
     @pytest.mark.parametrize('op', [operator.add, ops.radd])
-    def test_pi_add_intarray(self, box, op):
+    def test_pi_add_intarray(self, int_holder, op):
         # GH#19959
         pi = pd.PeriodIndex([pd.Period('2015Q1'), pd.Period('NaT')])
-        other = box([4, -1])
+        other = int_holder([4, -1])
         with tm.assert_produces_warning(FutureWarning, check_stacklevel=False,
                                         clear=[pd.core.arrays.datetimelike]):
             result = op(pi, other)
         expected = pd.PeriodIndex([pd.Period('2016Q1'), pd.Period('NaT')])
         tm.assert_index_equal(result, expected)
 
-    @pytest.mark.parametrize('box', [np.array, pd.Index])
-    def test_pi_sub_intarray(self, box):
+    @pytest.mark.parametrize('int_holder', [np.array, pd.Index])
+    def test_pi_sub_intarray(self, int_holder):
         # GH#19959
         pi = pd.PeriodIndex([pd.Period('2015Q1'), pd.Period('NaT')])
-        other = box([4, -1])
+        other = int_holder([4, -1])
         with tm.assert_produces_warning(FutureWarning, check_stacklevel=False,
                                         clear=[pd.core.arrays.datetimelike]):
             result = pi - other
diff --git a/pandas/tests/arithmetic/test_timedelta64.py b/pandas/tests/arithmetic/test_timedelta64.py
index 4d12a86e27e64..07c48554c65b8 100644
--- a/pandas/tests/arithmetic/test_timedelta64.py
+++ b/pandas/tests/arithmetic/test_timedelta64.py
@@ -354,10 +354,10 @@ def test_timedelta64_ops_nat(self):
     # -------------------------------------------------------------
     # Invalid Operations
 
-    def test_td64arr_add_str_invalid(self, box):
+    def test_td64arr_add_str_invalid(self, box_with_array):
         # GH#13624
         tdi = TimedeltaIndex(['1 day', '2 days'])
-        tdi = tm.box_expected(tdi, box)
+        tdi = tm.box_expected(tdi, box_with_array)
 
         with pytest.raises(TypeError):
             tdi + 'a'
@@ -368,20 +368,20 @@
     @pytest.mark.parametrize('op', [operator.add, ops.radd,
                                     operator.sub, ops.rsub],
                             ids=lambda x: x.__name__)
-    def test_td64arr_add_sub_float(self, box, op, other):
+    def test_td64arr_add_sub_float(self, box_with_array, op, other):
         tdi = TimedeltaIndex(['-1 days', '-1 days'])
-        tdi = tm.box_expected(tdi, box)
+        tdi = tm.box_expected(tdi, box_with_array)
 
         with pytest.raises(TypeError):
             op(tdi, other)
 
     @pytest.mark.parametrize('freq', [None, 'H'])
-    def test_td64arr_sub_period(self, box, freq):
+    def test_td64arr_sub_period(self, box_with_array, freq):
         # GH#13078
         # not supported, check TypeError
         p = pd.Period('2011-01-01', freq='D')
         idx = TimedeltaIndex(['1 hours', '2 hours'], freq=freq)
-        idx = tm.box_expected(idx, box)
+        idx = tm.box_expected(idx, box_with_array)
 
         with pytest.raises(TypeError):
             idx - p
@@ -391,23 +391,23 @@
     @pytest.mark.parametrize('pi_freq', ['D', 'W', 'Q', 'H'])
     @pytest.mark.parametrize('tdi_freq', [None, 'H'])
-    def test_td64arr_sub_pi(self, box, tdi_freq, pi_freq):
+    def test_td64arr_sub_pi(self, box_with_array, tdi_freq, pi_freq):
         # GH#20049 subtracting PeriodIndex should raise TypeError
         tdi = TimedeltaIndex(['1 hours', '2 hours'], freq=tdi_freq)
         dti = Timestamp('2018-03-07 17:16:40') + tdi
         pi = dti.to_period(pi_freq)
 
         # TODO: parametrize over box for pi?
-        tdi = tm.box_expected(tdi, box)
+        tdi = tm.box_expected(tdi, box_with_array)
         with pytest.raises(TypeError):
             tdi - pi
 
     # -------------------------------------------------------------
     # Binary operations td64 arraylike and datetime-like
 
-    def test_td64arr_sub_timestamp_raises(self, box):
+    def test_td64arr_sub_timestamp_raises(self, box_with_array):
         idx = TimedeltaIndex(['1 day', '2 day'])
-        idx = tm.box_expected(idx, box)
+        idx = tm.box_expected(idx, box_with_array)
 
         msg = ("cannot subtract a datelike from|"
                "Could not operate|"
@@ -415,7 +415,7 @@ def test_td64arr_sub_timestamp_raises(self, box):
         with pytest.raises(TypeError, match=msg):
             idx - Timestamp('2011-01-01')
 
-    def test_td64arr_add_timestamp(self, box, tz_naive_fixture):
+    def test_td64arr_add_timestamp(self, box_with_array, tz_naive_fixture):
         # GH#23215
         # TODO: parametrize over scalar datetime types?
         tz = tz_naive_fixture
@@ -426,8 +426,8 @@ def test_td64arr_add_timestamp(self, box, tz_naive_fixture):
 
         # FIXME: fails with transpose=True because of tz-aware DataFrame
         # transpose bug
-        idx = tm.box_expected(idx, box, transpose=False)
-        expected = tm.box_expected(expected, box, transpose=False)
+        idx = tm.box_expected(idx, box_with_array, transpose=False)
+        expected = tm.box_expected(expected, box_with_array, transpose=False)
 
         result = idx + other
         tm.assert_equal(result, expected)
@@ -435,23 +435,27 @@
         result = other + idx
         tm.assert_equal(result, expected)
 
-    def test_td64arr_add_sub_timestamp(self, box):
+    def test_td64arr_add_sub_timestamp(self, box_with_array):
         # GH#11925
+        if box_with_array is tm.to_array:
+            pytest.xfail("DatetimeArray.__sub__ returns ndarray instead "
+                         "of TimedeltaArray")
+
         ts = Timestamp('2012-01-01')
         # TODO: parametrize over types of datetime scalar?
 
         tdser = Series(timedelta_range('1 day', periods=3))
         expected = Series(pd.date_range('2012-01-02', periods=3))
 
-        tdser = tm.box_expected(tdser, box)
-        expected = tm.box_expected(expected, box)
+        tdser = tm.box_expected(tdser, box_with_array)
+        expected = tm.box_expected(expected, box_with_array)
 
         tm.assert_equal(ts + tdser, expected)
         tm.assert_equal(tdser + ts, expected)
 
         expected2 = Series(pd.date_range('2011-12-31', periods=3, freq='-1D'))
-        expected2 = tm.box_expected(expected2, box)
+        expected2 = tm.box_expected(expected2, box_with_array)
 
         tm.assert_equal(ts - tdser, expected2)
         tm.assert_equal(ts + (-tdser), expected2)
@@ -505,7 +509,8 @@ def test_td64arr_add_datetime64_nat(self, box):
     # ------------------------------------------------------------------
     # Operations with int-like others
 
-    def test_td64arr_add_int_series_invalid(self, box, tdser):
+    def test_td64arr_add_int_series_invalid(self, box):
+        tdser = pd.Series(['59 Days', '59 Days', 'NaT'], dtype='m8[ns]')
         tdser = tm.box_expected(tdser, box)
         err = TypeError if box is not pd.Index else NullFrequencyError
         int_ser = Series([2, 3, 4])
@@ -549,7 +554,8 @@ def test_td64arr_add_intlike(self, box):
             ser - pd.Index(other)
 
     @pytest.mark.parametrize('scalar', [1, 1.5, np.array(2)])
-    def test_td64arr_add_sub_numeric_scalar_invalid(self, box, scalar, tdser):
+    def test_td64arr_add_sub_numeric_scalar_invalid(self, box, scalar):
+        tdser = pd.Series(['59 Days', '59 Days', 'NaT'], dtype='m8[ns]')
         tdser = tm.box_expected(tdser, box)
         err = TypeError
         if box is pd.Index and not isinstance(scalar, float):
@@ -573,7 +579,8 @@
         Series([1, 2, 3])
         # TODO: Add DataFrame in here?
     ], ids=lambda x: type(x).__name__)
-    def test_td64arr_add_sub_numeric_arr_invalid(self, box, vec, dtype, tdser):
+    def test_td64arr_add_sub_numeric_arr_invalid(self, box, vec, dtype):
+        tdser = pd.Series(['59 Days', '59 Days', 'NaT'], dtype='m8[ns]')
         tdser = tm.box_expected(tdser, box)
         err = TypeError
         if box is pd.Index and not dtype.startswith('float'):
@@ -893,7 +900,7 @@ def test_td64arr_sub_offset_index(self, names, box):
         res = tdi - other
         tm.assert_equal(res, expected)
 
-    def test_td64arr_sub_offset_array(self, box):
+    def test_td64arr_sub_offset_array(self, box_with_array):
         # GH#18824
         tdi = TimedeltaIndex(['1 days 00:00:00', '3 days 04:00:00'])
         other = np.array([pd.offsets.Hour(n=1), pd.offsets.Minute(n=-2)])
@@ -901,12 +908,12 @@ def test_td64arr_sub_offset_array(self, box):
         expected = TimedeltaIndex([tdi[n] - other[n] for n in range(len(tdi))],
                                   freq='infer')
 
-        tdi = tm.box_expected(tdi, box)
-        expected = tm.box_expected(expected, box)
+        tdi = tm.box_expected(tdi, box_with_array)
+        expected = tm.box_expected(expected, box_with_array)
 
         # The DataFrame operation is transposed and so operates as separate
         # scalar operations, which do not issue a PerformanceWarning
-        warn = PerformanceWarning if box is not pd.DataFrame else None
+        warn = None if box_with_array is pd.DataFrame else PerformanceWarning
         with tm.assert_produces_warning(warn):
             res = tdi - other
         tm.assert_equal(res, expected)
@@ -917,7 +924,7 @@ def test_td64arr_sub_offset_array(self, box):
     def test_td64arr_with_offset_series(self, names, box_df_fail):
         # GH#18849
         box = box_df_fail
-        box2 = Series if box is pd.Index else box
+        box2 = Series if box in [pd.Index, tm.to_array] else box
 
         tdi = TimedeltaIndex(['1 days 00:00:00', '3 days 04:00:00'],
                              name=names[0])
@@ -947,10 +954,11 @@ def test_td64arr_with_offset_series(self, names, box_df_fail):
         tm.assert_equal(res3, expected_sub)
 
     @pytest.mark.parametrize('obox', [np.array, pd.Index, pd.Series])
-    def test_td64arr_addsub_anchored_offset_arraylike(self, obox, box):
+    def test_td64arr_addsub_anchored_offset_arraylike(self, obox,
+                                                      box_with_array):
         # GH#18824
         tdi = TimedeltaIndex(['1 days 00:00:00', '3 days 04:00:00'])
-        tdi = tm.box_expected(tdi, box)
+        tdi = tm.box_expected(tdi, box_with_array)
 
         anchored = obox([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])
 
@@ -1009,9 +1017,9 @@ def test_td64arr_mul_int(self, box):
         result = 1 * idx
         tm.assert_equal(result, idx)
 
-    def test_td64arr_mul_tdlike_scalar_raises(self, two_hours, box):
+    def test_td64arr_mul_tdlike_scalar_raises(self, two_hours, box_with_array):
         rng = timedelta_range('1 days', '10 days', name='foo')
-        rng = tm.box_expected(rng, box)
+        rng = tm.box_expected(rng, box_with_array)
         with pytest.raises(TypeError):
             rng * two_hours
@@ -1084,10 +1092,10 @@ def test_tdi_rmul_arraylike(self, other, box):
     # ------------------------------------------------------------------
     # __div__
 
-    def test_td64arr_div_nat_invalid(self, box):
+    def test_td64arr_div_nat_invalid(self, box_with_array):
         # don't allow division by NaT (maybe could in the future)
         rng = timedelta_range('1 days', '10 days', name='foo')
-        rng = tm.box_expected(rng, box)
+        rng = tm.box_expected(rng, box_with_array)
         with pytest.raises(TypeError):
             rng / pd.NaT
@@ -1205,11 +1213,11 @@ def test_td64arr_rfloordiv_tdlike_scalar(self, scalar_td, box):
     # ------------------------------------------------------------------
     # Operations with invalid others
 
-    def test_td64arr_mul_tdscalar_invalid(self, box, scalar_td):
+    def test_td64arr_mul_tdscalar_invalid(self, box_with_array, scalar_td):
         td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
         td1.iloc[2] = np.nan
-        td1 = tm.box_expected(td1, box)
+        td1 = tm.box_expected(td1, box_with_array)
 
         # check that we are getting a TypeError
         # with 'operate' (from core/ops.py) for the ops that are not
@@ -1220,17 +1228,17 @@ def test_td64arr_mul_tdscalar_invalid(self, box, scalar_td):
             with pytest.raises(TypeError, match=pattern):
                 scalar_td * td1
 
-    def test_td64arr_mul_too_short_raises(self, box):
+    def test_td64arr_mul_too_short_raises(self, box_with_array):
         idx = TimedeltaIndex(np.arange(5, dtype='int64'))
-        idx = tm.box_expected(idx, box)
+        idx = tm.box_expected(idx, box_with_array)
         with pytest.raises(TypeError):
             idx * idx[:3]
         with pytest.raises(ValueError):
             idx * np.array([1, 2])
 
-    def test_td64arr_mul_td64arr_raises(self, box):
+    def test_td64arr_mul_td64arr_raises(self, box_with_array):
         idx = TimedeltaIndex(np.arange(5, dtype='int64'))
-        idx = tm.box_expected(idx, box)
+        idx = tm.box_expected(idx, box_with_array)
         with pytest.raises(TypeError):
             idx * idx
@@ -1238,9 +1246,10 @@ def test_td64arr_mul_td64arr_raises(self, box):
     # Operations with numeric others
 
     @pytest.mark.parametrize('one', [1, np.array(1), 1.0, np.array(1.0)])
-    def test_td64arr_mul_numeric_scalar(self, box, one, tdser):
+    def test_td64arr_mul_numeric_scalar(self, box, one):
         # GH#4521
         # divide/multiply by integers
+        tdser = pd.Series(['59 Days', '59 Days', 'NaT'], dtype='m8[ns]')
         expected = Series(['-59 Days', '-59 Days', 'NaT'],
                           dtype='timedelta64[ns]')
 
@@ -1262,9 +1271,10 @@ def test_td64arr_mul_numeric_scalar(self, box, one, tdser):
         tm.assert_equal(result, expected)
 
     @pytest.mark.parametrize('two', [2, 2.0, np.array(2), np.array(2.0)])
-    def test_td64arr_div_numeric_scalar(self, box, two, tdser):
+    def test_td64arr_div_numeric_scalar(self, box, two):
         # GH#4521
         # divide/multiply by integers
+        tdser = pd.Series(['59 Days', '59 Days', 'NaT'], dtype='m8[ns]')
         expected = Series(['29.5D', '29.5D', 'NaT'], dtype='timedelta64[ns]')
 
         tdser = tm.box_expected(tdser, box)
@@ -1281,10 +1291,10 @@ def test_td64arr_div_numeric_scalar(self, box, two, tdser):
                                         Series([20, 30, 40])],
                              ids=lambda x: type(x).__name__)
     @pytest.mark.parametrize('op', [operator.mul, ops.rmul])
-    def test_td64arr_rmul_numeric_array(self, op, box,
-                                        vector, dtype, tdser):
+    def test_td64arr_rmul_numeric_array(self, op, box, vector, dtype):
         # GH#4521
         # divide/multiply by integers
+        tdser = pd.Series(['59 Days', '59 Days', 'NaT'], dtype='m8[ns]')
         vector = vector.astype(dtype)
 
         expected = Series(['1180 Days', '1770 Days', 'NaT'],
                           dtype='timedelta64[ns]')
@@ -1305,9 +1315,10 @@ def test_td64arr_rmul_numeric_array(self, op, box,
                                         pd.Index([20, 30, 40]),
                                         Series([20, 30, 40])],
                              ids=lambda x: type(x).__name__)
-    def test_td64arr_div_numeric_array(self, box, vector, dtype, tdser):
+    def test_td64arr_div_numeric_array(self, box, vector, dtype):
         # GH#4521
         # divide/multiply by integers
+        tdser = pd.Series(['59 Days', '59 Days', 'NaT'], dtype='m8[ns]')
         vector = vector.astype(dtype)
         expected = Series(['2.95D', '1D 23H 12m', 'NaT'],
                           dtype='timedelta64[ns]')
@@ -1378,11 +1389,11 @@ def test_float_series_rdiv_td64arr(self, box, names):
 
 class TestTimedeltaArraylikeInvalidArithmeticOps(object):
 
-    def test_td64arr_pow_invalid(self, scalar_td, box):
+    def test_td64arr_pow_invalid(self, scalar_td, box_with_array):
         td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
         td1.iloc[2] = np.nan
-        td1 = tm.box_expected(td1, box)
+        td1 = tm.box_expected(td1, box_with_array)
 
         # check that we are getting a TypeError
         # with 'operate' (from core/ops.py) for the ops that are not
diff --git a/pandas/tseries/frequencies.py b/pandas/tseries/frequencies.py
index d6e4824575468..ac9a87b258056 100644
--- a/pandas/tseries/frequencies.py
+++ b/pandas/tseries/frequencies.py
@@ -292,7 +292,7 @@ class _FrequencyInferer(object):
 
     def __init__(self, index, warn=True):
         self.index = index
-        self.values = np.asarray(index).view('i8')
+        self.values = index.asi8
 
         # This moves the values, which are implicitly in UTC, to the
         # the timezone so they are in local time