Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_arithmetic.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_asfreq.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_period.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/test_arithmetic.py +486 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/test_asfreq.py +828 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/test_period.py +1154 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_arithmetic.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_constructors.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_formats.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_timedelta.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__pycache__/test_as_unit.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__pycache__/test_round.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/test_as_unit.py +80 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/test_round.py +187 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/test_arithmetic.py +1183 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/test_formats.py +109 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/__pycache__/test_arithmetic.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_as_unit.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_replace.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_timestamp_method.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_to_julian_date.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_to_pydatetime.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_tz_convert.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_tz_localize.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/test_timestamp_method.py +31 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_freq_code.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_frequencies.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_inference.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_freq_code.py +69 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_frequencies.py +29 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_inference.py +558 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_calendar.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_federal.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_holiday.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_observance.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_calendar.py +119 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_federal.py +58 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_holiday.py +332 -0
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (179 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_arithmetic.cpython-310.pyc
ADDED
|
Binary file (13.1 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_asfreq.cpython-310.pyc
ADDED
|
Binary file (22.1 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_period.cpython-310.pyc
ADDED
|
Binary file (33.7 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/test_arithmetic.py
ADDED
|
@@ -0,0 +1,486 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import timedelta
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
from pandas._libs.tslibs.period import IncompatibleFrequency
|
| 7 |
+
|
| 8 |
+
from pandas import (
|
| 9 |
+
NaT,
|
| 10 |
+
Period,
|
| 11 |
+
Timedelta,
|
| 12 |
+
Timestamp,
|
| 13 |
+
offsets,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class TestPeriodArithmetic:
|
| 18 |
+
def test_add_overflow_raises(self):
|
| 19 |
+
# GH#55503
|
| 20 |
+
per = Timestamp.max.to_period("ns")
|
| 21 |
+
|
| 22 |
+
msg = "|".join(
|
| 23 |
+
[
|
| 24 |
+
"Python int too large to convert to C long",
|
| 25 |
+
# windows, 32bit linux builds
|
| 26 |
+
"int too big to convert",
|
| 27 |
+
]
|
| 28 |
+
)
|
| 29 |
+
with pytest.raises(OverflowError, match=msg):
|
| 30 |
+
per + 1
|
| 31 |
+
|
| 32 |
+
msg = "value too large"
|
| 33 |
+
with pytest.raises(OverflowError, match=msg):
|
| 34 |
+
per + Timedelta(1)
|
| 35 |
+
with pytest.raises(OverflowError, match=msg):
|
| 36 |
+
per + offsets.Nano(1)
|
| 37 |
+
|
| 38 |
+
def test_period_add_integer(self):
|
| 39 |
+
per1 = Period(freq="D", year=2008, month=1, day=1)
|
| 40 |
+
per2 = Period(freq="D", year=2008, month=1, day=2)
|
| 41 |
+
assert per1 + 1 == per2
|
| 42 |
+
assert 1 + per1 == per2
|
| 43 |
+
|
| 44 |
+
def test_period_add_invalid(self):
|
| 45 |
+
# GH#4731
|
| 46 |
+
per1 = Period(freq="D", year=2008, month=1, day=1)
|
| 47 |
+
per2 = Period(freq="D", year=2008, month=1, day=2)
|
| 48 |
+
|
| 49 |
+
msg = "|".join(
|
| 50 |
+
[
|
| 51 |
+
r"unsupported operand type\(s\)",
|
| 52 |
+
"can only concatenate str",
|
| 53 |
+
"must be str, not Period",
|
| 54 |
+
]
|
| 55 |
+
)
|
| 56 |
+
with pytest.raises(TypeError, match=msg):
|
| 57 |
+
per1 + "str"
|
| 58 |
+
with pytest.raises(TypeError, match=msg):
|
| 59 |
+
"str" + per1
|
| 60 |
+
with pytest.raises(TypeError, match=msg):
|
| 61 |
+
per1 + per2
|
| 62 |
+
|
| 63 |
+
def test_period_sub_period_annual(self):
|
| 64 |
+
left, right = Period("2011", freq="Y"), Period("2007", freq="Y")
|
| 65 |
+
result = left - right
|
| 66 |
+
assert result == 4 * right.freq
|
| 67 |
+
|
| 68 |
+
msg = r"Input has different freq=M from Period\(freq=Y-DEC\)"
|
| 69 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 70 |
+
left - Period("2007-01", freq="M")
|
| 71 |
+
|
| 72 |
+
def test_period_sub_period(self):
|
| 73 |
+
per1 = Period("2011-01-01", freq="D")
|
| 74 |
+
per2 = Period("2011-01-15", freq="D")
|
| 75 |
+
|
| 76 |
+
off = per1.freq
|
| 77 |
+
assert per1 - per2 == -14 * off
|
| 78 |
+
assert per2 - per1 == 14 * off
|
| 79 |
+
|
| 80 |
+
msg = r"Input has different freq=M from Period\(freq=D\)"
|
| 81 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 82 |
+
per1 - Period("2011-02", freq="M")
|
| 83 |
+
|
| 84 |
+
@pytest.mark.parametrize("n", [1, 2, 3, 4])
|
| 85 |
+
def test_sub_n_gt_1_ticks(self, tick_classes, n):
|
| 86 |
+
# GH#23878
|
| 87 |
+
p1 = Period("19910905", freq=tick_classes(n))
|
| 88 |
+
p2 = Period("19920406", freq=tick_classes(n))
|
| 89 |
+
|
| 90 |
+
expected = Period(str(p2), freq=p2.freq.base) - Period(
|
| 91 |
+
str(p1), freq=p1.freq.base
|
| 92 |
+
)
|
| 93 |
+
|
| 94 |
+
assert (p2 - p1) == expected
|
| 95 |
+
|
| 96 |
+
@pytest.mark.parametrize("normalize", [True, False])
|
| 97 |
+
@pytest.mark.parametrize("n", [1, 2, 3, 4])
|
| 98 |
+
@pytest.mark.parametrize(
|
| 99 |
+
"offset, kwd_name",
|
| 100 |
+
[
|
| 101 |
+
(offsets.YearEnd, "month"),
|
| 102 |
+
(offsets.QuarterEnd, "startingMonth"),
|
| 103 |
+
(offsets.MonthEnd, None),
|
| 104 |
+
(offsets.Week, "weekday"),
|
| 105 |
+
],
|
| 106 |
+
)
|
| 107 |
+
def test_sub_n_gt_1_offsets(self, offset, kwd_name, n, normalize):
|
| 108 |
+
# GH#23878
|
| 109 |
+
kwds = {kwd_name: 3} if kwd_name is not None else {}
|
| 110 |
+
p1_d = "19910905"
|
| 111 |
+
p2_d = "19920406"
|
| 112 |
+
p1 = Period(p1_d, freq=offset(n, normalize, **kwds))
|
| 113 |
+
p2 = Period(p2_d, freq=offset(n, normalize, **kwds))
|
| 114 |
+
|
| 115 |
+
expected = Period(p2_d, freq=p2.freq.base) - Period(p1_d, freq=p1.freq.base)
|
| 116 |
+
|
| 117 |
+
assert (p2 - p1) == expected
|
| 118 |
+
|
| 119 |
+
def test_period_add_offset(self):
|
| 120 |
+
# freq is DateOffset
|
| 121 |
+
for freq in ["Y", "2Y", "3Y"]:
|
| 122 |
+
per = Period("2011", freq=freq)
|
| 123 |
+
exp = Period("2013", freq=freq)
|
| 124 |
+
assert per + offsets.YearEnd(2) == exp
|
| 125 |
+
assert offsets.YearEnd(2) + per == exp
|
| 126 |
+
|
| 127 |
+
for off in [
|
| 128 |
+
offsets.YearBegin(2),
|
| 129 |
+
offsets.MonthBegin(1),
|
| 130 |
+
offsets.Minute(),
|
| 131 |
+
np.timedelta64(365, "D"),
|
| 132 |
+
timedelta(365),
|
| 133 |
+
]:
|
| 134 |
+
msg = "Input has different freq|Input cannot be converted to Period"
|
| 135 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 136 |
+
per + off
|
| 137 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 138 |
+
off + per
|
| 139 |
+
|
| 140 |
+
for freq in ["M", "2M", "3M"]:
|
| 141 |
+
per = Period("2011-03", freq=freq)
|
| 142 |
+
exp = Period("2011-05", freq=freq)
|
| 143 |
+
assert per + offsets.MonthEnd(2) == exp
|
| 144 |
+
assert offsets.MonthEnd(2) + per == exp
|
| 145 |
+
|
| 146 |
+
exp = Period("2012-03", freq=freq)
|
| 147 |
+
assert per + offsets.MonthEnd(12) == exp
|
| 148 |
+
assert offsets.MonthEnd(12) + per == exp
|
| 149 |
+
|
| 150 |
+
msg = "|".join(
|
| 151 |
+
[
|
| 152 |
+
"Input has different freq",
|
| 153 |
+
"Input cannot be converted to Period",
|
| 154 |
+
]
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
for off in [
|
| 158 |
+
offsets.YearBegin(2),
|
| 159 |
+
offsets.MonthBegin(1),
|
| 160 |
+
offsets.Minute(),
|
| 161 |
+
np.timedelta64(365, "D"),
|
| 162 |
+
timedelta(365),
|
| 163 |
+
]:
|
| 164 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 165 |
+
per + off
|
| 166 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 167 |
+
off + per
|
| 168 |
+
|
| 169 |
+
# freq is Tick
|
| 170 |
+
for freq in ["D", "2D", "3D"]:
|
| 171 |
+
per = Period("2011-04-01", freq=freq)
|
| 172 |
+
|
| 173 |
+
exp = Period("2011-04-06", freq=freq)
|
| 174 |
+
assert per + offsets.Day(5) == exp
|
| 175 |
+
assert offsets.Day(5) + per == exp
|
| 176 |
+
|
| 177 |
+
exp = Period("2011-04-02", freq=freq)
|
| 178 |
+
assert per + offsets.Hour(24) == exp
|
| 179 |
+
assert offsets.Hour(24) + per == exp
|
| 180 |
+
|
| 181 |
+
exp = Period("2011-04-03", freq=freq)
|
| 182 |
+
assert per + np.timedelta64(2, "D") == exp
|
| 183 |
+
assert np.timedelta64(2, "D") + per == exp
|
| 184 |
+
|
| 185 |
+
exp = Period("2011-04-02", freq=freq)
|
| 186 |
+
assert per + np.timedelta64(3600 * 24, "s") == exp
|
| 187 |
+
assert np.timedelta64(3600 * 24, "s") + per == exp
|
| 188 |
+
|
| 189 |
+
exp = Period("2011-03-30", freq=freq)
|
| 190 |
+
assert per + timedelta(-2) == exp
|
| 191 |
+
assert timedelta(-2) + per == exp
|
| 192 |
+
|
| 193 |
+
exp = Period("2011-04-03", freq=freq)
|
| 194 |
+
assert per + timedelta(hours=48) == exp
|
| 195 |
+
assert timedelta(hours=48) + per == exp
|
| 196 |
+
|
| 197 |
+
msg = "|".join(
|
| 198 |
+
[
|
| 199 |
+
"Input has different freq",
|
| 200 |
+
"Input cannot be converted to Period",
|
| 201 |
+
]
|
| 202 |
+
)
|
| 203 |
+
|
| 204 |
+
for off in [
|
| 205 |
+
offsets.YearBegin(2),
|
| 206 |
+
offsets.MonthBegin(1),
|
| 207 |
+
offsets.Minute(),
|
| 208 |
+
np.timedelta64(4, "h"),
|
| 209 |
+
timedelta(hours=23),
|
| 210 |
+
]:
|
| 211 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 212 |
+
per + off
|
| 213 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 214 |
+
off + per
|
| 215 |
+
|
| 216 |
+
for freq in ["h", "2h", "3h"]:
|
| 217 |
+
per = Period("2011-04-01 09:00", freq=freq)
|
| 218 |
+
|
| 219 |
+
exp = Period("2011-04-03 09:00", freq=freq)
|
| 220 |
+
assert per + offsets.Day(2) == exp
|
| 221 |
+
assert offsets.Day(2) + per == exp
|
| 222 |
+
|
| 223 |
+
exp = Period("2011-04-01 12:00", freq=freq)
|
| 224 |
+
assert per + offsets.Hour(3) == exp
|
| 225 |
+
assert offsets.Hour(3) + per == exp
|
| 226 |
+
|
| 227 |
+
msg = "cannot use operands with types"
|
| 228 |
+
exp = Period("2011-04-01 12:00", freq=freq)
|
| 229 |
+
assert per + np.timedelta64(3, "h") == exp
|
| 230 |
+
assert np.timedelta64(3, "h") + per == exp
|
| 231 |
+
|
| 232 |
+
exp = Period("2011-04-01 10:00", freq=freq)
|
| 233 |
+
assert per + np.timedelta64(3600, "s") == exp
|
| 234 |
+
assert np.timedelta64(3600, "s") + per == exp
|
| 235 |
+
|
| 236 |
+
exp = Period("2011-04-01 11:00", freq=freq)
|
| 237 |
+
assert per + timedelta(minutes=120) == exp
|
| 238 |
+
assert timedelta(minutes=120) + per == exp
|
| 239 |
+
|
| 240 |
+
exp = Period("2011-04-05 12:00", freq=freq)
|
| 241 |
+
assert per + timedelta(days=4, minutes=180) == exp
|
| 242 |
+
assert timedelta(days=4, minutes=180) + per == exp
|
| 243 |
+
|
| 244 |
+
msg = "|".join(
|
| 245 |
+
[
|
| 246 |
+
"Input has different freq",
|
| 247 |
+
"Input cannot be converted to Period",
|
| 248 |
+
]
|
| 249 |
+
)
|
| 250 |
+
|
| 251 |
+
for off in [
|
| 252 |
+
offsets.YearBegin(2),
|
| 253 |
+
offsets.MonthBegin(1),
|
| 254 |
+
offsets.Minute(),
|
| 255 |
+
np.timedelta64(3200, "s"),
|
| 256 |
+
timedelta(hours=23, minutes=30),
|
| 257 |
+
]:
|
| 258 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 259 |
+
per + off
|
| 260 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 261 |
+
off + per
|
| 262 |
+
|
| 263 |
+
def test_period_sub_offset(self):
|
| 264 |
+
# freq is DateOffset
|
| 265 |
+
msg = "|".join(
|
| 266 |
+
[
|
| 267 |
+
"Input has different freq",
|
| 268 |
+
"Input cannot be converted to Period",
|
| 269 |
+
]
|
| 270 |
+
)
|
| 271 |
+
|
| 272 |
+
for freq in ["Y", "2Y", "3Y"]:
|
| 273 |
+
per = Period("2011", freq=freq)
|
| 274 |
+
assert per - offsets.YearEnd(2) == Period("2009", freq=freq)
|
| 275 |
+
|
| 276 |
+
for off in [
|
| 277 |
+
offsets.YearBegin(2),
|
| 278 |
+
offsets.MonthBegin(1),
|
| 279 |
+
offsets.Minute(),
|
| 280 |
+
np.timedelta64(365, "D"),
|
| 281 |
+
timedelta(365),
|
| 282 |
+
]:
|
| 283 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 284 |
+
per - off
|
| 285 |
+
|
| 286 |
+
for freq in ["M", "2M", "3M"]:
|
| 287 |
+
per = Period("2011-03", freq=freq)
|
| 288 |
+
assert per - offsets.MonthEnd(2) == Period("2011-01", freq=freq)
|
| 289 |
+
assert per - offsets.MonthEnd(12) == Period("2010-03", freq=freq)
|
| 290 |
+
|
| 291 |
+
for off in [
|
| 292 |
+
offsets.YearBegin(2),
|
| 293 |
+
offsets.MonthBegin(1),
|
| 294 |
+
offsets.Minute(),
|
| 295 |
+
np.timedelta64(365, "D"),
|
| 296 |
+
timedelta(365),
|
| 297 |
+
]:
|
| 298 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 299 |
+
per - off
|
| 300 |
+
|
| 301 |
+
# freq is Tick
|
| 302 |
+
for freq in ["D", "2D", "3D"]:
|
| 303 |
+
per = Period("2011-04-01", freq=freq)
|
| 304 |
+
assert per - offsets.Day(5) == Period("2011-03-27", freq=freq)
|
| 305 |
+
assert per - offsets.Hour(24) == Period("2011-03-31", freq=freq)
|
| 306 |
+
assert per - np.timedelta64(2, "D") == Period("2011-03-30", freq=freq)
|
| 307 |
+
assert per - np.timedelta64(3600 * 24, "s") == Period(
|
| 308 |
+
"2011-03-31", freq=freq
|
| 309 |
+
)
|
| 310 |
+
assert per - timedelta(-2) == Period("2011-04-03", freq=freq)
|
| 311 |
+
assert per - timedelta(hours=48) == Period("2011-03-30", freq=freq)
|
| 312 |
+
|
| 313 |
+
for off in [
|
| 314 |
+
offsets.YearBegin(2),
|
| 315 |
+
offsets.MonthBegin(1),
|
| 316 |
+
offsets.Minute(),
|
| 317 |
+
np.timedelta64(4, "h"),
|
| 318 |
+
timedelta(hours=23),
|
| 319 |
+
]:
|
| 320 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 321 |
+
per - off
|
| 322 |
+
|
| 323 |
+
for freq in ["h", "2h", "3h"]:
|
| 324 |
+
per = Period("2011-04-01 09:00", freq=freq)
|
| 325 |
+
assert per - offsets.Day(2) == Period("2011-03-30 09:00", freq=freq)
|
| 326 |
+
assert per - offsets.Hour(3) == Period("2011-04-01 06:00", freq=freq)
|
| 327 |
+
assert per - np.timedelta64(3, "h") == Period("2011-04-01 06:00", freq=freq)
|
| 328 |
+
assert per - np.timedelta64(3600, "s") == Period(
|
| 329 |
+
"2011-04-01 08:00", freq=freq
|
| 330 |
+
)
|
| 331 |
+
assert per - timedelta(minutes=120) == Period("2011-04-01 07:00", freq=freq)
|
| 332 |
+
assert per - timedelta(days=4, minutes=180) == Period(
|
| 333 |
+
"2011-03-28 06:00", freq=freq
|
| 334 |
+
)
|
| 335 |
+
|
| 336 |
+
for off in [
|
| 337 |
+
offsets.YearBegin(2),
|
| 338 |
+
offsets.MonthBegin(1),
|
| 339 |
+
offsets.Minute(),
|
| 340 |
+
np.timedelta64(3200, "s"),
|
| 341 |
+
timedelta(hours=23, minutes=30),
|
| 342 |
+
]:
|
| 343 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 344 |
+
per - off
|
| 345 |
+
|
| 346 |
+
@pytest.mark.parametrize("freq", ["M", "2M", "3M"])
|
| 347 |
+
def test_period_addsub_nat(self, freq):
|
| 348 |
+
# GH#13071
|
| 349 |
+
per = Period("2011-01", freq=freq)
|
| 350 |
+
|
| 351 |
+
# For subtraction, NaT is treated as another Period object
|
| 352 |
+
assert NaT - per is NaT
|
| 353 |
+
assert per - NaT is NaT
|
| 354 |
+
|
| 355 |
+
# For addition, NaT is treated as offset-like
|
| 356 |
+
assert NaT + per is NaT
|
| 357 |
+
assert per + NaT is NaT
|
| 358 |
+
|
| 359 |
+
@pytest.mark.parametrize("unit", ["ns", "us", "ms", "s", "m"])
|
| 360 |
+
def test_period_add_sub_td64_nat(self, unit):
|
| 361 |
+
# GH#47196
|
| 362 |
+
per = Period("2022-06-01", "D")
|
| 363 |
+
nat = np.timedelta64("NaT", unit)
|
| 364 |
+
|
| 365 |
+
assert per + nat is NaT
|
| 366 |
+
assert nat + per is NaT
|
| 367 |
+
assert per - nat is NaT
|
| 368 |
+
|
| 369 |
+
with pytest.raises(TypeError, match="unsupported operand"):
|
| 370 |
+
nat - per
|
| 371 |
+
|
| 372 |
+
def test_period_ops_offset(self):
|
| 373 |
+
per = Period("2011-04-01", freq="D")
|
| 374 |
+
result = per + offsets.Day()
|
| 375 |
+
exp = Period("2011-04-02", freq="D")
|
| 376 |
+
assert result == exp
|
| 377 |
+
|
| 378 |
+
result = per - offsets.Day(2)
|
| 379 |
+
exp = Period("2011-03-30", freq="D")
|
| 380 |
+
assert result == exp
|
| 381 |
+
|
| 382 |
+
msg = r"Input cannot be converted to Period\(freq=D\)"
|
| 383 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 384 |
+
per + offsets.Hour(2)
|
| 385 |
+
|
| 386 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 387 |
+
per - offsets.Hour(2)
|
| 388 |
+
|
| 389 |
+
def test_period_add_timestamp_raises(self):
|
| 390 |
+
# GH#17983
|
| 391 |
+
ts = Timestamp("2017")
|
| 392 |
+
per = Period("2017", freq="M")
|
| 393 |
+
|
| 394 |
+
msg = r"unsupported operand type\(s\) for \+: 'Timestamp' and 'Period'"
|
| 395 |
+
with pytest.raises(TypeError, match=msg):
|
| 396 |
+
ts + per
|
| 397 |
+
|
| 398 |
+
msg = r"unsupported operand type\(s\) for \+: 'Period' and 'Timestamp'"
|
| 399 |
+
with pytest.raises(TypeError, match=msg):
|
| 400 |
+
per + ts
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
class TestPeriodComparisons:
|
| 404 |
+
def test_period_comparison_same_freq(self):
|
| 405 |
+
jan = Period("2000-01", "M")
|
| 406 |
+
feb = Period("2000-02", "M")
|
| 407 |
+
|
| 408 |
+
assert not jan == feb
|
| 409 |
+
assert jan != feb
|
| 410 |
+
assert jan < feb
|
| 411 |
+
assert jan <= feb
|
| 412 |
+
assert not jan > feb
|
| 413 |
+
assert not jan >= feb
|
| 414 |
+
|
| 415 |
+
def test_period_comparison_same_period_different_object(self):
|
| 416 |
+
# Separate Period objects for the same period
|
| 417 |
+
left = Period("2000-01", "M")
|
| 418 |
+
right = Period("2000-01", "M")
|
| 419 |
+
|
| 420 |
+
assert left == right
|
| 421 |
+
assert left >= right
|
| 422 |
+
assert left <= right
|
| 423 |
+
assert not left < right
|
| 424 |
+
assert not left > right
|
| 425 |
+
|
| 426 |
+
def test_period_comparison_mismatched_freq(self):
|
| 427 |
+
jan = Period("2000-01", "M")
|
| 428 |
+
day = Period("2012-01-01", "D")
|
| 429 |
+
|
| 430 |
+
assert not jan == day
|
| 431 |
+
assert jan != day
|
| 432 |
+
msg = r"Input has different freq=D from Period\(freq=M\)"
|
| 433 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 434 |
+
jan < day
|
| 435 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 436 |
+
jan <= day
|
| 437 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 438 |
+
jan > day
|
| 439 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 440 |
+
jan >= day
|
| 441 |
+
|
| 442 |
+
def test_period_comparison_invalid_type(self):
|
| 443 |
+
jan = Period("2000-01", "M")
|
| 444 |
+
|
| 445 |
+
assert not jan == 1
|
| 446 |
+
assert jan != 1
|
| 447 |
+
|
| 448 |
+
int_or_per = "'(Period|int)'"
|
| 449 |
+
msg = f"not supported between instances of {int_or_per} and {int_or_per}"
|
| 450 |
+
for left, right in [(jan, 1), (1, jan)]:
|
| 451 |
+
with pytest.raises(TypeError, match=msg):
|
| 452 |
+
left > right
|
| 453 |
+
with pytest.raises(TypeError, match=msg):
|
| 454 |
+
left >= right
|
| 455 |
+
with pytest.raises(TypeError, match=msg):
|
| 456 |
+
left < right
|
| 457 |
+
with pytest.raises(TypeError, match=msg):
|
| 458 |
+
left <= right
|
| 459 |
+
|
| 460 |
+
def test_period_comparison_nat(self):
|
| 461 |
+
per = Period("2011-01-01", freq="D")
|
| 462 |
+
|
| 463 |
+
ts = Timestamp("2011-01-01")
|
| 464 |
+
# confirm Period('NaT') work identical with Timestamp('NaT')
|
| 465 |
+
for left, right in [
|
| 466 |
+
(NaT, per),
|
| 467 |
+
(per, NaT),
|
| 468 |
+
(NaT, ts),
|
| 469 |
+
(ts, NaT),
|
| 470 |
+
]:
|
| 471 |
+
assert not left < right
|
| 472 |
+
assert not left > right
|
| 473 |
+
assert not left == right
|
| 474 |
+
assert left != right
|
| 475 |
+
assert not left <= right
|
| 476 |
+
assert not left >= right
|
| 477 |
+
|
| 478 |
+
@pytest.mark.parametrize(
|
| 479 |
+
"zerodim_arr, expected",
|
| 480 |
+
((np.array(0), False), (np.array(Period("2000-01", "M")), True)),
|
| 481 |
+
)
|
| 482 |
+
def test_period_comparison_numpy_zerodim_arr(self, zerodim_arr, expected):
|
| 483 |
+
per = Period("2000-01", "M")
|
| 484 |
+
|
| 485 |
+
assert (per == zerodim_arr) is expected
|
| 486 |
+
assert (zerodim_arr == per) is expected
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/test_asfreq.py
ADDED
|
@@ -0,0 +1,828 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
|
| 4 |
+
from pandas.errors import OutOfBoundsDatetime
|
| 5 |
+
|
| 6 |
+
from pandas import (
|
| 7 |
+
Period,
|
| 8 |
+
Timestamp,
|
| 9 |
+
offsets,
|
| 10 |
+
)
|
| 11 |
+
import pandas._testing as tm
|
| 12 |
+
|
| 13 |
+
bday_msg = "Period with BDay freq is deprecated"
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TestFreqConversion:
|
| 17 |
+
"""Test frequency conversion of date objects"""
|
| 18 |
+
|
| 19 |
+
@pytest.mark.filterwarnings("ignore:Period with BDay:FutureWarning")
|
| 20 |
+
@pytest.mark.parametrize("freq", ["Y", "Q", "M", "W", "B", "D"])
|
| 21 |
+
def test_asfreq_near_zero(self, freq):
|
| 22 |
+
# GH#19643, GH#19650
|
| 23 |
+
per = Period("0001-01-01", freq=freq)
|
| 24 |
+
tup1 = (per.year, per.hour, per.day)
|
| 25 |
+
|
| 26 |
+
prev = per - 1
|
| 27 |
+
assert prev.ordinal == per.ordinal - 1
|
| 28 |
+
tup2 = (prev.year, prev.month, prev.day)
|
| 29 |
+
assert tup2 < tup1
|
| 30 |
+
|
| 31 |
+
def test_asfreq_near_zero_weekly(self):
|
| 32 |
+
# GH#19834
|
| 33 |
+
per1 = Period("0001-01-01", "D") + 6
|
| 34 |
+
per2 = Period("0001-01-01", "D") - 6
|
| 35 |
+
week1 = per1.asfreq("W")
|
| 36 |
+
week2 = per2.asfreq("W")
|
| 37 |
+
assert week1 != week2
|
| 38 |
+
assert week1.asfreq("D", "E") >= per1
|
| 39 |
+
assert week2.asfreq("D", "S") <= per2
|
| 40 |
+
|
| 41 |
+
def test_to_timestamp_out_of_bounds(self):
|
| 42 |
+
# GH#19643, used to incorrectly give Timestamp in 1754
|
| 43 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 44 |
+
per = Period("0001-01-01", freq="B")
|
| 45 |
+
msg = "Out of bounds nanosecond timestamp"
|
| 46 |
+
with pytest.raises(OutOfBoundsDatetime, match=msg):
|
| 47 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 48 |
+
per.to_timestamp()
|
| 49 |
+
|
| 50 |
+
def test_asfreq_corner(self):
|
| 51 |
+
val = Period(freq="Y", year=2007)
|
| 52 |
+
result1 = val.asfreq("5min")
|
| 53 |
+
result2 = val.asfreq("min")
|
| 54 |
+
expected = Period("2007-12-31 23:59", freq="min")
|
| 55 |
+
assert result1.ordinal == expected.ordinal
|
| 56 |
+
assert result1.freqstr == "5min"
|
| 57 |
+
assert result2.ordinal == expected.ordinal
|
| 58 |
+
assert result2.freqstr == "min"
|
| 59 |
+
|
| 60 |
+
    def test_conv_annual(self):
        """Exhaustive asfreq checks from annual frequency to every coarser and
        finer frequency, with "s"/"S" selecting the start sub-period and
        "e"/"E" the end sub-period."""
        # frequency conversion tests: from Annual Frequency

        ival_A = Period(freq="Y", year=2007)

        # Fiscal years ending in JAN / JUN / NOV.
        ival_AJAN = Period(freq="Y-JAN", year=2007)
        ival_AJUN = Period(freq="Y-JUN", year=2007)
        ival_ANOV = Period(freq="Y-NOV", year=2007)

        # Expected start/end targets for each destination frequency.
        ival_A_to_Q_start = Period(freq="Q", year=2007, quarter=1)
        ival_A_to_Q_end = Period(freq="Q", year=2007, quarter=4)
        ival_A_to_M_start = Period(freq="M", year=2007, month=1)
        ival_A_to_M_end = Period(freq="M", year=2007, month=12)
        ival_A_to_W_start = Period(freq="W", year=2007, month=1, day=1)
        ival_A_to_W_end = Period(freq="W", year=2007, month=12, day=31)
        # BDay-freq construction warns (deprecated).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_A_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_A_to_B_end = Period(freq="B", year=2007, month=12, day=31)
        ival_A_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_A_to_D_end = Period(freq="D", year=2007, month=12, day=31)
        ival_A_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_A_to_H_end = Period(freq="h", year=2007, month=12, day=31, hour=23)
        ival_A_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_A_to_T_end = Period(
            freq="Min", year=2007, month=12, day=31, hour=23, minute=59
        )
        ival_A_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_A_to_S_end = Period(
            freq="s", year=2007, month=12, day=31, hour=23, minute=59, second=59
        )

        # Fiscal-year daily bounds (fiscal year 2007 ends in its named month).
        ival_AJAN_to_D_end = Period(freq="D", year=2007, month=1, day=31)
        ival_AJAN_to_D_start = Period(freq="D", year=2006, month=2, day=1)
        ival_AJUN_to_D_end = Period(freq="D", year=2007, month=6, day=30)
        ival_AJUN_to_D_start = Period(freq="D", year=2006, month=7, day=1)
        ival_ANOV_to_D_end = Period(freq="D", year=2007, month=11, day=30)
        ival_ANOV_to_D_start = Period(freq="D", year=2006, month=12, day=1)

        assert ival_A.asfreq("Q", "s") == ival_A_to_Q_start
        assert ival_A.asfreq("Q", "e") == ival_A_to_Q_end
        assert ival_A.asfreq("M", "s") == ival_A_to_M_start
        assert ival_A.asfreq("M", "E") == ival_A_to_M_end
        assert ival_A.asfreq("W", "s") == ival_A_to_W_start
        assert ival_A.asfreq("W", "E") == ival_A_to_W_end
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_A.asfreq("B", "s") == ival_A_to_B_start
            assert ival_A.asfreq("B", "E") == ival_A_to_B_end
        assert ival_A.asfreq("D", "s") == ival_A_to_D_start
        assert ival_A.asfreq("D", "E") == ival_A_to_D_end
        # Uppercase aliases H/T/S are deprecated spellings of h/min/s and
        # must warn while still converting correctly.
        msg = "'H' is deprecated and will be removed in a future version."
        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert ival_A.asfreq("H", "s") == ival_A_to_H_start
            assert ival_A.asfreq("H", "E") == ival_A_to_H_end
        assert ival_A.asfreq("min", "s") == ival_A_to_T_start
        assert ival_A.asfreq("min", "E") == ival_A_to_T_end
        msg = "'T' is deprecated and will be removed in a future version."
        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert ival_A.asfreq("T", "s") == ival_A_to_T_start
            assert ival_A.asfreq("T", "E") == ival_A_to_T_end
        msg = "'S' is deprecated and will be removed in a future version."
        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert ival_A.asfreq("S", "S") == ival_A_to_S_start
            assert ival_A.asfreq("S", "E") == ival_A_to_S_end

        assert ival_AJAN.asfreq("D", "s") == ival_AJAN_to_D_start
        assert ival_AJAN.asfreq("D", "E") == ival_AJAN_to_D_end

        assert ival_AJUN.asfreq("D", "s") == ival_AJUN_to_D_start
        assert ival_AJUN.asfreq("D", "E") == ival_AJUN_to_D_end

        assert ival_ANOV.asfreq("D", "s") == ival_ANOV_to_D_start
        assert ival_ANOV.asfreq("D", "E") == ival_ANOV_to_D_end

        # Identity conversion.
        assert ival_A.asfreq("Y") == ival_A
|
| 138 |
+
|
| 139 |
+
    def test_conv_quarterly(self):
        """Exhaustive asfreq checks from quarterly frequency; "s"/"E" pick the
        start/end sub-period of the quarter."""
        # frequency conversion tests: from Quarterly Frequency

        ival_Q = Period(freq="Q", year=2007, quarter=1)
        ival_Q_end_of_year = Period(freq="Q", year=2007, quarter=4)

        # Quarters of fiscal years ending in JAN / JUN.
        ival_QEJAN = Period(freq="Q-JAN", year=2007, quarter=1)
        ival_QEJUN = Period(freq="Q-JUN", year=2007, quarter=1)

        # Expected start/end targets for each destination frequency.
        ival_Q_to_A = Period(freq="Y", year=2007)
        ival_Q_to_M_start = Period(freq="M", year=2007, month=1)
        ival_Q_to_M_end = Period(freq="M", year=2007, month=3)
        ival_Q_to_W_start = Period(freq="W", year=2007, month=1, day=1)
        ival_Q_to_W_end = Period(freq="W", year=2007, month=3, day=31)
        # BDay-freq construction warns (deprecated).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_Q_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_Q_to_B_end = Period(freq="B", year=2007, month=3, day=30)
        ival_Q_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_Q_to_D_end = Period(freq="D", year=2007, month=3, day=31)
        ival_Q_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_Q_to_H_end = Period(freq="h", year=2007, month=3, day=31, hour=23)
        ival_Q_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_Q_to_T_end = Period(
            freq="Min", year=2007, month=3, day=31, hour=23, minute=59
        )
        ival_Q_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_Q_to_S_end = Period(
            freq="s", year=2007, month=3, day=31, hour=23, minute=59, second=59
        )

        # Q1 of a JAN-ending fiscal year 2007 spans Feb–Apr 2006.
        ival_QEJAN_to_D_start = Period(freq="D", year=2006, month=2, day=1)
        ival_QEJAN_to_D_end = Period(freq="D", year=2006, month=4, day=30)

        # Q1 of a JUN-ending fiscal year 2007 spans Jul–Sep 2006.
        ival_QEJUN_to_D_start = Period(freq="D", year=2006, month=7, day=1)
        ival_QEJUN_to_D_end = Period(freq="D", year=2006, month=9, day=30)

        assert ival_Q.asfreq("Y") == ival_Q_to_A
        assert ival_Q_end_of_year.asfreq("Y") == ival_Q_to_A

        assert ival_Q.asfreq("M", "s") == ival_Q_to_M_start
        assert ival_Q.asfreq("M", "E") == ival_Q_to_M_end
        assert ival_Q.asfreq("W", "s") == ival_Q_to_W_start
        assert ival_Q.asfreq("W", "E") == ival_Q_to_W_end
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_Q.asfreq("B", "s") == ival_Q_to_B_start
            assert ival_Q.asfreq("B", "E") == ival_Q_to_B_end
        assert ival_Q.asfreq("D", "s") == ival_Q_to_D_start
        assert ival_Q.asfreq("D", "E") == ival_Q_to_D_end
        assert ival_Q.asfreq("h", "s") == ival_Q_to_H_start
        assert ival_Q.asfreq("h", "E") == ival_Q_to_H_end
        assert ival_Q.asfreq("Min", "s") == ival_Q_to_T_start
        assert ival_Q.asfreq("Min", "E") == ival_Q_to_T_end
        assert ival_Q.asfreq("s", "s") == ival_Q_to_S_start
        assert ival_Q.asfreq("s", "E") == ival_Q_to_S_end

        assert ival_QEJAN.asfreq("D", "s") == ival_QEJAN_to_D_start
        assert ival_QEJAN.asfreq("D", "E") == ival_QEJAN_to_D_end
        assert ival_QEJUN.asfreq("D", "s") == ival_QEJUN_to_D_start
        assert ival_QEJUN.asfreq("D", "E") == ival_QEJUN_to_D_end

        # Identity conversion.
        assert ival_Q.asfreq("Q") == ival_Q
|
| 204 |
+
|
| 205 |
+
    def test_conv_monthly(self):
        """Exhaustive asfreq checks from monthly frequency; "s"/"E" pick the
        start/end sub-period of the month."""
        # frequency conversion tests: from Monthly Frequency

        ival_M = Period(freq="M", year=2007, month=1)
        ival_M_end_of_year = Period(freq="M", year=2007, month=12)
        ival_M_end_of_quarter = Period(freq="M", year=2007, month=3)
        # Expected start/end targets for each destination frequency.
        ival_M_to_A = Period(freq="Y", year=2007)
        ival_M_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_M_to_W_start = Period(freq="W", year=2007, month=1, day=1)
        ival_M_to_W_end = Period(freq="W", year=2007, month=1, day=31)
        # BDay-freq construction warns (deprecated).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_M_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_M_to_B_end = Period(freq="B", year=2007, month=1, day=31)
        ival_M_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_M_to_D_end = Period(freq="D", year=2007, month=1, day=31)
        ival_M_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_M_to_H_end = Period(freq="h", year=2007, month=1, day=31, hour=23)
        ival_M_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_M_to_T_end = Period(
            freq="Min", year=2007, month=1, day=31, hour=23, minute=59
        )
        ival_M_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_M_to_S_end = Period(
            freq="s", year=2007, month=1, day=31, hour=23, minute=59, second=59
        )

        # Upsampling: any month of the year/quarter maps to that year/quarter.
        assert ival_M.asfreq("Y") == ival_M_to_A
        assert ival_M_end_of_year.asfreq("Y") == ival_M_to_A
        assert ival_M.asfreq("Q") == ival_M_to_Q
        assert ival_M_end_of_quarter.asfreq("Q") == ival_M_to_Q

        assert ival_M.asfreq("W", "s") == ival_M_to_W_start
        assert ival_M.asfreq("W", "E") == ival_M_to_W_end
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_M.asfreq("B", "s") == ival_M_to_B_start
            assert ival_M.asfreq("B", "E") == ival_M_to_B_end
        assert ival_M.asfreq("D", "s") == ival_M_to_D_start
        assert ival_M.asfreq("D", "E") == ival_M_to_D_end
        assert ival_M.asfreq("h", "s") == ival_M_to_H_start
        assert ival_M.asfreq("h", "E") == ival_M_to_H_end
        assert ival_M.asfreq("Min", "s") == ival_M_to_T_start
        assert ival_M.asfreq("Min", "E") == ival_M_to_T_end
        assert ival_M.asfreq("s", "s") == ival_M_to_S_start
        assert ival_M.asfreq("s", "E") == ival_M_to_S_end

        # Identity conversion.
        assert ival_M.asfreq("M") == ival_M
|
| 255 |
+
|
| 256 |
+
    def test_conv_weekly(self):
        """Exhaustive asfreq checks from weekly frequency, including every
        weekday-anchored variant (W-SAT..W-MON) and the weekday-dependent
        year/quarter/month targets."""
        # frequency conversion tests: from Weekly Frequency
        ival_W = Period(freq="W", year=2007, month=1, day=1)

        # One week per anchor day; each contains the given date.
        ival_WSUN = Period(freq="W", year=2007, month=1, day=7)
        ival_WSAT = Period(freq="W-SAT", year=2007, month=1, day=6)
        ival_WFRI = Period(freq="W-FRI", year=2007, month=1, day=5)
        ival_WTHU = Period(freq="W-THU", year=2007, month=1, day=4)
        ival_WWED = Period(freq="W-WED", year=2007, month=1, day=3)
        ival_WTUE = Period(freq="W-TUE", year=2007, month=1, day=2)
        ival_WMON = Period(freq="W-MON", year=2007, month=1, day=1)

        # Daily bounds of each anchored week.
        ival_WSUN_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_WSUN_to_D_end = Period(freq="D", year=2007, month=1, day=7)
        ival_WSAT_to_D_start = Period(freq="D", year=2006, month=12, day=31)
        ival_WSAT_to_D_end = Period(freq="D", year=2007, month=1, day=6)
        ival_WFRI_to_D_start = Period(freq="D", year=2006, month=12, day=30)
        ival_WFRI_to_D_end = Period(freq="D", year=2007, month=1, day=5)
        ival_WTHU_to_D_start = Period(freq="D", year=2006, month=12, day=29)
        ival_WTHU_to_D_end = Period(freq="D", year=2007, month=1, day=4)
        ival_WWED_to_D_start = Period(freq="D", year=2006, month=12, day=28)
        ival_WWED_to_D_end = Period(freq="D", year=2007, month=1, day=3)
        ival_WTUE_to_D_start = Period(freq="D", year=2006, month=12, day=27)
        ival_WTUE_to_D_end = Period(freq="D", year=2007, month=1, day=2)
        ival_WMON_to_D_start = Period(freq="D", year=2006, month=12, day=26)
        ival_WMON_to_D_end = Period(freq="D", year=2007, month=1, day=1)

        ival_W_end_of_year = Period(freq="W", year=2007, month=12, day=31)
        ival_W_end_of_quarter = Period(freq="W", year=2007, month=3, day=31)
        ival_W_end_of_month = Period(freq="W", year=2007, month=1, day=31)
        ival_W_to_A = Period(freq="Y", year=2007)
        ival_W_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_W_to_M = Period(freq="M", year=2007, month=1)

        # A week straddling a year/quarter/month boundary belongs to the later
        # period unless the boundary date is the week's last day (Sunday).
        if Period(freq="D", year=2007, month=12, day=31).weekday == 6:
            ival_W_to_A_end_of_year = Period(freq="Y", year=2007)
        else:
            ival_W_to_A_end_of_year = Period(freq="Y", year=2008)

        if Period(freq="D", year=2007, month=3, day=31).weekday == 6:
            ival_W_to_Q_end_of_quarter = Period(freq="Q", year=2007, quarter=1)
        else:
            ival_W_to_Q_end_of_quarter = Period(freq="Q", year=2007, quarter=2)

        if Period(freq="D", year=2007, month=1, day=31).weekday == 6:
            ival_W_to_M_end_of_month = Period(freq="M", year=2007, month=1)
        else:
            ival_W_to_M_end_of_month = Period(freq="M", year=2007, month=2)

        # BDay-freq construction warns (deprecated).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_W_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_W_to_B_end = Period(freq="B", year=2007, month=1, day=5)
        ival_W_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_W_to_D_end = Period(freq="D", year=2007, month=1, day=7)
        ival_W_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_W_to_H_end = Period(freq="h", year=2007, month=1, day=7, hour=23)
        ival_W_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_W_to_T_end = Period(
            freq="Min", year=2007, month=1, day=7, hour=23, minute=59
        )
        ival_W_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_W_to_S_end = Period(
            freq="s", year=2007, month=1, day=7, hour=23, minute=59, second=59
        )

        assert ival_W.asfreq("Y") == ival_W_to_A
        assert ival_W_end_of_year.asfreq("Y") == ival_W_to_A_end_of_year

        assert ival_W.asfreq("Q") == ival_W_to_Q
        assert ival_W_end_of_quarter.asfreq("Q") == ival_W_to_Q_end_of_quarter

        assert ival_W.asfreq("M") == ival_W_to_M
        assert ival_W_end_of_month.asfreq("M") == ival_W_to_M_end_of_month

        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_W.asfreq("B", "s") == ival_W_to_B_start
            assert ival_W.asfreq("B", "E") == ival_W_to_B_end

        assert ival_W.asfreq("D", "s") == ival_W_to_D_start
        assert ival_W.asfreq("D", "E") == ival_W_to_D_end

        assert ival_WSUN.asfreq("D", "s") == ival_WSUN_to_D_start
        assert ival_WSUN.asfreq("D", "E") == ival_WSUN_to_D_end
        assert ival_WSAT.asfreq("D", "s") == ival_WSAT_to_D_start
        assert ival_WSAT.asfreq("D", "E") == ival_WSAT_to_D_end
        assert ival_WFRI.asfreq("D", "s") == ival_WFRI_to_D_start
        assert ival_WFRI.asfreq("D", "E") == ival_WFRI_to_D_end
        assert ival_WTHU.asfreq("D", "s") == ival_WTHU_to_D_start
        assert ival_WTHU.asfreq("D", "E") == ival_WTHU_to_D_end
        assert ival_WWED.asfreq("D", "s") == ival_WWED_to_D_start
        assert ival_WWED.asfreq("D", "E") == ival_WWED_to_D_end
        assert ival_WTUE.asfreq("D", "s") == ival_WTUE_to_D_start
        assert ival_WTUE.asfreq("D", "E") == ival_WTUE_to_D_end
        assert ival_WMON.asfreq("D", "s") == ival_WMON_to_D_start
        assert ival_WMON.asfreq("D", "E") == ival_WMON_to_D_end

        assert ival_W.asfreq("h", "s") == ival_W_to_H_start
        assert ival_W.asfreq("h", "E") == ival_W_to_H_end
        assert ival_W.asfreq("Min", "s") == ival_W_to_T_start
        assert ival_W.asfreq("Min", "E") == ival_W_to_T_end
        assert ival_W.asfreq("s", "s") == ival_W_to_S_start
        assert ival_W.asfreq("s", "E") == ival_W_to_S_end

        # Identity conversion.
        assert ival_W.asfreq("W") == ival_W

        # Removed legacy alias "WK" must raise.
        msg = INVALID_FREQ_ERR_MSG
        with pytest.raises(ValueError, match=msg):
            ival_W.asfreq("WK")
|
| 368 |
+
|
| 369 |
+
def test_conv_weekly_legacy(self):
|
| 370 |
+
# frequency conversion tests: from Weekly Frequency
|
| 371 |
+
msg = INVALID_FREQ_ERR_MSG
|
| 372 |
+
with pytest.raises(ValueError, match=msg):
|
| 373 |
+
Period(freq="WK", year=2007, month=1, day=1)
|
| 374 |
+
|
| 375 |
+
with pytest.raises(ValueError, match=msg):
|
| 376 |
+
Period(freq="WK-SAT", year=2007, month=1, day=6)
|
| 377 |
+
with pytest.raises(ValueError, match=msg):
|
| 378 |
+
Period(freq="WK-FRI", year=2007, month=1, day=5)
|
| 379 |
+
with pytest.raises(ValueError, match=msg):
|
| 380 |
+
Period(freq="WK-THU", year=2007, month=1, day=4)
|
| 381 |
+
with pytest.raises(ValueError, match=msg):
|
| 382 |
+
Period(freq="WK-WED", year=2007, month=1, day=3)
|
| 383 |
+
with pytest.raises(ValueError, match=msg):
|
| 384 |
+
Period(freq="WK-TUE", year=2007, month=1, day=2)
|
| 385 |
+
with pytest.raises(ValueError, match=msg):
|
| 386 |
+
Period(freq="WK-MON", year=2007, month=1, day=1)
|
| 387 |
+
|
| 388 |
+
    def test_conv_business(self):
        """Exhaustive asfreq checks from (deprecated) business-day frequency;
        every B-freq construction and conversion must emit the deprecation
        warning."""
        # frequency conversion tests: from Business Frequency"

        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_B = Period(freq="B", year=2007, month=1, day=1)
            ival_B_end_of_year = Period(freq="B", year=2007, month=12, day=31)
            ival_B_end_of_quarter = Period(freq="B", year=2007, month=3, day=30)
            ival_B_end_of_month = Period(freq="B", year=2007, month=1, day=31)
            ival_B_end_of_week = Period(freq="B", year=2007, month=1, day=5)

        # Expected targets for each destination frequency.
        ival_B_to_A = Period(freq="Y", year=2007)
        ival_B_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_B_to_M = Period(freq="M", year=2007, month=1)
        ival_B_to_W = Period(freq="W", year=2007, month=1, day=7)
        ival_B_to_D = Period(freq="D", year=2007, month=1, day=1)
        ival_B_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_B_to_H_end = Period(freq="h", year=2007, month=1, day=1, hour=23)
        ival_B_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_B_to_T_end = Period(
            freq="Min", year=2007, month=1, day=1, hour=23, minute=59
        )
        ival_B_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_B_to_S_end = Period(
            freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
        )

        assert ival_B.asfreq("Y") == ival_B_to_A
        assert ival_B_end_of_year.asfreq("Y") == ival_B_to_A
        assert ival_B.asfreq("Q") == ival_B_to_Q
        assert ival_B_end_of_quarter.asfreq("Q") == ival_B_to_Q
        assert ival_B.asfreq("M") == ival_B_to_M
        assert ival_B_end_of_month.asfreq("M") == ival_B_to_M
        assert ival_B.asfreq("W") == ival_B_to_W
        assert ival_B_end_of_week.asfreq("W") == ival_B_to_W

        assert ival_B.asfreq("D") == ival_B_to_D

        assert ival_B.asfreq("h", "s") == ival_B_to_H_start
        assert ival_B.asfreq("h", "E") == ival_B_to_H_end
        assert ival_B.asfreq("Min", "s") == ival_B_to_T_start
        assert ival_B.asfreq("Min", "E") == ival_B_to_T_end
        assert ival_B.asfreq("s", "s") == ival_B_to_S_start
        assert ival_B.asfreq("s", "E") == ival_B_to_S_end

        # Identity conversion (still warns because the target freq is B).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_B.asfreq("B") == ival_B
|
| 438 |
+
|
| 439 |
+
    def test_conv_daily(self):
        """Exhaustive asfreq checks from daily frequency, including weekend
        rounding to business days and fiscal year/quarter targets."""
        # frequency conversion tests: from Business Frequency"

        ival_D = Period(freq="D", year=2007, month=1, day=1)
        ival_D_end_of_year = Period(freq="D", year=2007, month=12, day=31)
        ival_D_end_of_quarter = Period(freq="D", year=2007, month=3, day=31)
        ival_D_end_of_month = Period(freq="D", year=2007, month=1, day=31)
        ival_D_end_of_week = Period(freq="D", year=2007, month=1, day=7)

        # 2007-01-05 was a Friday; 06/07 fall on the weekend.
        ival_D_friday = Period(freq="D", year=2007, month=1, day=5)
        ival_D_saturday = Period(freq="D", year=2007, month=1, day=6)
        ival_D_sunday = Period(freq="D", year=2007, month=1, day=7)

        # BDay-freq construction warns (deprecated).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_B_friday = Period(freq="B", year=2007, month=1, day=5)
            ival_B_monday = Period(freq="B", year=2007, month=1, day=8)

        ival_D_to_A = Period(freq="Y", year=2007)

        # 2007-03-31 belongs to different fiscal years depending on the
        # fiscal year-end month.
        ival_Deoq_to_AJAN = Period(freq="Y-JAN", year=2008)
        ival_Deoq_to_AJUN = Period(freq="Y-JUN", year=2007)
        ival_Deoq_to_ADEC = Period(freq="Y-DEC", year=2007)

        ival_D_to_QEJAN = Period(freq="Q-JAN", year=2007, quarter=4)
        ival_D_to_QEJUN = Period(freq="Q-JUN", year=2007, quarter=3)
        ival_D_to_QEDEC = Period(freq="Q-DEC", year=2007, quarter=1)

        ival_D_to_M = Period(freq="M", year=2007, month=1)
        ival_D_to_W = Period(freq="W", year=2007, month=1, day=7)

        ival_D_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_D_to_H_end = Period(freq="h", year=2007, month=1, day=1, hour=23)
        ival_D_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_D_to_T_end = Period(
            freq="Min", year=2007, month=1, day=1, hour=23, minute=59
        )
        ival_D_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_D_to_S_end = Period(
            freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
        )

        assert ival_D.asfreq("Y") == ival_D_to_A

        assert ival_D_end_of_quarter.asfreq("Y-JAN") == ival_Deoq_to_AJAN
        assert ival_D_end_of_quarter.asfreq("Y-JUN") == ival_Deoq_to_AJUN
        assert ival_D_end_of_quarter.asfreq("Y-DEC") == ival_Deoq_to_ADEC

        assert ival_D_end_of_year.asfreq("Y") == ival_D_to_A
        assert ival_D_end_of_quarter.asfreq("Q") == ival_D_to_QEDEC
        assert ival_D.asfreq("Q-JAN") == ival_D_to_QEJAN
        assert ival_D.asfreq("Q-JUN") == ival_D_to_QEJUN
        assert ival_D.asfreq("Q-DEC") == ival_D_to_QEDEC
        assert ival_D.asfreq("M") == ival_D_to_M
        assert ival_D_end_of_month.asfreq("M") == ival_D_to_M
        assert ival_D.asfreq("W") == ival_D_to_W
        assert ival_D_end_of_week.asfreq("W") == ival_D_to_W

        # Weekend days round to the previous ("s") or next ("E") business day.
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_D_friday.asfreq("B") == ival_B_friday
            assert ival_D_saturday.asfreq("B", "s") == ival_B_friday
            assert ival_D_saturday.asfreq("B", "E") == ival_B_monday
            assert ival_D_sunday.asfreq("B", "s") == ival_B_friday
            assert ival_D_sunday.asfreq("B", "E") == ival_B_monday

        assert ival_D.asfreq("h", "s") == ival_D_to_H_start
        assert ival_D.asfreq("h", "E") == ival_D_to_H_end
        assert ival_D.asfreq("Min", "s") == ival_D_to_T_start
        assert ival_D.asfreq("Min", "E") == ival_D_to_T_end
        assert ival_D.asfreq("s", "s") == ival_D_to_S_start
        assert ival_D.asfreq("s", "E") == ival_D_to_S_end

        # Identity conversion.
        assert ival_D.asfreq("D") == ival_D
|
| 515 |
+
|
| 516 |
+
    def test_conv_hourly(self):
        """Exhaustive asfreq checks from hourly frequency."""
        # frequency conversion tests: from Hourly Frequency"

        ival_H = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_H_end_of_year = Period(freq="h", year=2007, month=12, day=31, hour=23)
        ival_H_end_of_quarter = Period(freq="h", year=2007, month=3, day=31, hour=23)
        ival_H_end_of_month = Period(freq="h", year=2007, month=1, day=31, hour=23)
        ival_H_end_of_week = Period(freq="h", year=2007, month=1, day=7, hour=23)
        ival_H_end_of_day = Period(freq="h", year=2007, month=1, day=1, hour=23)
        ival_H_end_of_bus = Period(freq="h", year=2007, month=1, day=1, hour=23)

        # Expected targets for each destination frequency.
        ival_H_to_A = Period(freq="Y", year=2007)
        ival_H_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_H_to_M = Period(freq="M", year=2007, month=1)
        ival_H_to_W = Period(freq="W", year=2007, month=1, day=7)
        ival_H_to_D = Period(freq="D", year=2007, month=1, day=1)
        # BDay-freq construction warns (deprecated).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_H_to_B = Period(freq="B", year=2007, month=1, day=1)

        ival_H_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_H_to_T_end = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=59
        )
        ival_H_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_H_to_S_end = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=59, second=59
        )

        assert ival_H.asfreq("Y") == ival_H_to_A
        assert ival_H_end_of_year.asfreq("Y") == ival_H_to_A
        assert ival_H.asfreq("Q") == ival_H_to_Q
        assert ival_H_end_of_quarter.asfreq("Q") == ival_H_to_Q
        assert ival_H.asfreq("M") == ival_H_to_M
        assert ival_H_end_of_month.asfreq("M") == ival_H_to_M
        assert ival_H.asfreq("W") == ival_H_to_W
        assert ival_H_end_of_week.asfreq("W") == ival_H_to_W
        assert ival_H.asfreq("D") == ival_H_to_D
        assert ival_H_end_of_day.asfreq("D") == ival_H_to_D
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_H.asfreq("B") == ival_H_to_B
            assert ival_H_end_of_bus.asfreq("B") == ival_H_to_B

        assert ival_H.asfreq("Min", "s") == ival_H_to_T_start
        assert ival_H.asfreq("Min", "E") == ival_H_to_T_end
        assert ival_H.asfreq("s", "s") == ival_H_to_S_start
        assert ival_H.asfreq("s", "E") == ival_H_to_S_end

        # Identity conversion.
        assert ival_H.asfreq("h") == ival_H
|
| 568 |
+
|
| 569 |
+
    def test_conv_minutely(self):
        """Exhaustive asfreq checks from minutely frequency."""
        # frequency conversion tests: from Minutely Frequency"

        ival_T = Period(freq="Min", year=2007, month=1, day=1, hour=0, minute=0)
        ival_T_end_of_year = Period(
            freq="Min", year=2007, month=12, day=31, hour=23, minute=59
        )
        ival_T_end_of_quarter = Period(
            freq="Min", year=2007, month=3, day=31, hour=23, minute=59
        )
        ival_T_end_of_month = Period(
            freq="Min", year=2007, month=1, day=31, hour=23, minute=59
        )
        ival_T_end_of_week = Period(
            freq="Min", year=2007, month=1, day=7, hour=23, minute=59
        )
        ival_T_end_of_day = Period(
            freq="Min", year=2007, month=1, day=1, hour=23, minute=59
        )
        ival_T_end_of_bus = Period(
            freq="Min", year=2007, month=1, day=1, hour=23, minute=59
        )
        ival_T_end_of_hour = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=59
        )

        # Expected targets for each destination frequency.
        ival_T_to_A = Period(freq="Y", year=2007)
        ival_T_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_T_to_M = Period(freq="M", year=2007, month=1)
        ival_T_to_W = Period(freq="W", year=2007, month=1, day=7)
        ival_T_to_D = Period(freq="D", year=2007, month=1, day=1)
        # BDay-freq construction warns (deprecated).
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_T_to_B = Period(freq="B", year=2007, month=1, day=1)
        ival_T_to_H = Period(freq="h", year=2007, month=1, day=1, hour=0)

        ival_T_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_T_to_S_end = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=59
        )

        assert ival_T.asfreq("Y") == ival_T_to_A
        assert ival_T_end_of_year.asfreq("Y") == ival_T_to_A
        assert ival_T.asfreq("Q") == ival_T_to_Q
        assert ival_T_end_of_quarter.asfreq("Q") == ival_T_to_Q
        assert ival_T.asfreq("M") == ival_T_to_M
        assert ival_T_end_of_month.asfreq("M") == ival_T_to_M
        assert ival_T.asfreq("W") == ival_T_to_W
        assert ival_T_end_of_week.asfreq("W") == ival_T_to_W
        assert ival_T.asfreq("D") == ival_T_to_D
        assert ival_T_end_of_day.asfreq("D") == ival_T_to_D
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_T.asfreq("B") == ival_T_to_B
            assert ival_T_end_of_bus.asfreq("B") == ival_T_to_B
        assert ival_T.asfreq("h") == ival_T_to_H
        assert ival_T_end_of_hour.asfreq("h") == ival_T_to_H

        assert ival_T.asfreq("s", "s") == ival_T_to_S_start
        assert ival_T.asfreq("s", "E") == ival_T_to_S_end

        # Identity conversion.
        assert ival_T.asfreq("Min") == ival_T
|
| 631 |
+
|
| 632 |
+
def test_conv_secondly(self):
|
| 633 |
+
# frequency conversion tests: from Secondly Frequency"
|
| 634 |
+
|
| 635 |
+
ival_S = Period(freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0)
|
| 636 |
+
ival_S_end_of_year = Period(
|
| 637 |
+
freq="s", year=2007, month=12, day=31, hour=23, minute=59, second=59
|
| 638 |
+
)
|
| 639 |
+
ival_S_end_of_quarter = Period(
|
| 640 |
+
freq="s", year=2007, month=3, day=31, hour=23, minute=59, second=59
|
| 641 |
+
)
|
| 642 |
+
ival_S_end_of_month = Period(
|
| 643 |
+
freq="s", year=2007, month=1, day=31, hour=23, minute=59, second=59
|
| 644 |
+
)
|
| 645 |
+
ival_S_end_of_week = Period(
|
| 646 |
+
freq="s", year=2007, month=1, day=7, hour=23, minute=59, second=59
|
| 647 |
+
)
|
| 648 |
+
ival_S_end_of_day = Period(
|
| 649 |
+
freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
|
| 650 |
+
)
|
| 651 |
+
ival_S_end_of_bus = Period(
|
| 652 |
+
freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
|
| 653 |
+
)
|
| 654 |
+
ival_S_end_of_hour = Period(
|
| 655 |
+
freq="s", year=2007, month=1, day=1, hour=0, minute=59, second=59
|
| 656 |
+
)
|
| 657 |
+
ival_S_end_of_minute = Period(
|
| 658 |
+
freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=59
|
| 659 |
+
)
|
| 660 |
+
|
| 661 |
+
ival_S_to_A = Period(freq="Y", year=2007)
|
| 662 |
+
ival_S_to_Q = Period(freq="Q", year=2007, quarter=1)
|
| 663 |
+
ival_S_to_M = Period(freq="M", year=2007, month=1)
|
| 664 |
+
ival_S_to_W = Period(freq="W", year=2007, month=1, day=7)
|
| 665 |
+
ival_S_to_D = Period(freq="D", year=2007, month=1, day=1)
|
| 666 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 667 |
+
ival_S_to_B = Period(freq="B", year=2007, month=1, day=1)
|
| 668 |
+
ival_S_to_H = Period(freq="h", year=2007, month=1, day=1, hour=0)
|
| 669 |
+
ival_S_to_T = Period(freq="Min", year=2007, month=1, day=1, hour=0, minute=0)
|
| 670 |
+
|
| 671 |
+
assert ival_S.asfreq("Y") == ival_S_to_A
|
| 672 |
+
assert ival_S_end_of_year.asfreq("Y") == ival_S_to_A
|
| 673 |
+
assert ival_S.asfreq("Q") == ival_S_to_Q
|
| 674 |
+
assert ival_S_end_of_quarter.asfreq("Q") == ival_S_to_Q
|
| 675 |
+
assert ival_S.asfreq("M") == ival_S_to_M
|
| 676 |
+
assert ival_S_end_of_month.asfreq("M") == ival_S_to_M
|
| 677 |
+
assert ival_S.asfreq("W") == ival_S_to_W
|
| 678 |
+
assert ival_S_end_of_week.asfreq("W") == ival_S_to_W
|
| 679 |
+
assert ival_S.asfreq("D") == ival_S_to_D
|
| 680 |
+
assert ival_S_end_of_day.asfreq("D") == ival_S_to_D
|
| 681 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 682 |
+
assert ival_S.asfreq("B") == ival_S_to_B
|
| 683 |
+
assert ival_S_end_of_bus.asfreq("B") == ival_S_to_B
|
| 684 |
+
assert ival_S.asfreq("h") == ival_S_to_H
|
| 685 |
+
assert ival_S_end_of_hour.asfreq("h") == ival_S_to_H
|
| 686 |
+
assert ival_S.asfreq("Min") == ival_S_to_T
|
| 687 |
+
assert ival_S_end_of_minute.asfreq("Min") == ival_S_to_T
|
| 688 |
+
|
| 689 |
+
assert ival_S.asfreq("s") == ival_S
|
| 690 |
+
|
| 691 |
+
def test_conv_microsecond(self):
|
| 692 |
+
# GH#31475 Avoid floating point errors dropping the start_time to
|
| 693 |
+
# before the beginning of the Period
|
| 694 |
+
per = Period("2020-01-30 15:57:27.576166", freq="us")
|
| 695 |
+
assert per.ordinal == 1580399847576166
|
| 696 |
+
|
| 697 |
+
start = per.start_time
|
| 698 |
+
expected = Timestamp("2020-01-30 15:57:27.576166")
|
| 699 |
+
assert start == expected
|
| 700 |
+
assert start._value == per.ordinal * 1000
|
| 701 |
+
|
| 702 |
+
per2 = Period("2300-01-01", "us")
|
| 703 |
+
msg = "2300-01-01"
|
| 704 |
+
with pytest.raises(OutOfBoundsDatetime, match=msg):
|
| 705 |
+
per2.start_time
|
| 706 |
+
with pytest.raises(OutOfBoundsDatetime, match=msg):
|
| 707 |
+
per2.end_time
|
| 708 |
+
|
| 709 |
+
def test_asfreq_mult(self):
|
| 710 |
+
# normal freq to mult freq
|
| 711 |
+
p = Period(freq="Y", year=2007)
|
| 712 |
+
# ordinal will not change
|
| 713 |
+
for freq in ["3Y", offsets.YearEnd(3)]:
|
| 714 |
+
result = p.asfreq(freq)
|
| 715 |
+
expected = Period("2007", freq="3Y")
|
| 716 |
+
|
| 717 |
+
assert result == expected
|
| 718 |
+
assert result.ordinal == expected.ordinal
|
| 719 |
+
assert result.freq == expected.freq
|
| 720 |
+
# ordinal will not change
|
| 721 |
+
for freq in ["3Y", offsets.YearEnd(3)]:
|
| 722 |
+
result = p.asfreq(freq, how="S")
|
| 723 |
+
expected = Period("2007", freq="3Y")
|
| 724 |
+
|
| 725 |
+
assert result == expected
|
| 726 |
+
assert result.ordinal == expected.ordinal
|
| 727 |
+
assert result.freq == expected.freq
|
| 728 |
+
|
| 729 |
+
# mult freq to normal freq
|
| 730 |
+
p = Period(freq="3Y", year=2007)
|
| 731 |
+
# ordinal will change because how=E is the default
|
| 732 |
+
for freq in ["Y", offsets.YearEnd()]:
|
| 733 |
+
result = p.asfreq(freq)
|
| 734 |
+
expected = Period("2009", freq="Y")
|
| 735 |
+
|
| 736 |
+
assert result == expected
|
| 737 |
+
assert result.ordinal == expected.ordinal
|
| 738 |
+
assert result.freq == expected.freq
|
| 739 |
+
# ordinal will not change
|
| 740 |
+
for freq in ["Y", offsets.YearEnd()]:
|
| 741 |
+
result = p.asfreq(freq, how="s")
|
| 742 |
+
expected = Period("2007", freq="Y")
|
| 743 |
+
|
| 744 |
+
assert result == expected
|
| 745 |
+
assert result.ordinal == expected.ordinal
|
| 746 |
+
assert result.freq == expected.freq
|
| 747 |
+
|
| 748 |
+
p = Period(freq="Y", year=2007)
|
| 749 |
+
for freq in ["2M", offsets.MonthEnd(2)]:
|
| 750 |
+
result = p.asfreq(freq)
|
| 751 |
+
expected = Period("2007-12", freq="2M")
|
| 752 |
+
|
| 753 |
+
assert result == expected
|
| 754 |
+
assert result.ordinal == expected.ordinal
|
| 755 |
+
assert result.freq == expected.freq
|
| 756 |
+
for freq in ["2M", offsets.MonthEnd(2)]:
|
| 757 |
+
result = p.asfreq(freq, how="s")
|
| 758 |
+
expected = Period("2007-01", freq="2M")
|
| 759 |
+
|
| 760 |
+
assert result == expected
|
| 761 |
+
assert result.ordinal == expected.ordinal
|
| 762 |
+
assert result.freq == expected.freq
|
| 763 |
+
|
| 764 |
+
p = Period(freq="3Y", year=2007)
|
| 765 |
+
for freq in ["2M", offsets.MonthEnd(2)]:
|
| 766 |
+
result = p.asfreq(freq)
|
| 767 |
+
expected = Period("2009-12", freq="2M")
|
| 768 |
+
|
| 769 |
+
assert result == expected
|
| 770 |
+
assert result.ordinal == expected.ordinal
|
| 771 |
+
assert result.freq == expected.freq
|
| 772 |
+
for freq in ["2M", offsets.MonthEnd(2)]:
|
| 773 |
+
result = p.asfreq(freq, how="s")
|
| 774 |
+
expected = Period("2007-01", freq="2M")
|
| 775 |
+
|
| 776 |
+
assert result == expected
|
| 777 |
+
assert result.ordinal == expected.ordinal
|
| 778 |
+
assert result.freq == expected.freq
|
| 779 |
+
|
| 780 |
+
def test_asfreq_combined(self):
|
| 781 |
+
# normal freq to combined freq
|
| 782 |
+
p = Period("2007", freq="h")
|
| 783 |
+
|
| 784 |
+
# ordinal will not change
|
| 785 |
+
expected = Period("2007", freq="25h")
|
| 786 |
+
for freq, how in zip(["1D1h", "1h1D"], ["E", "S"]):
|
| 787 |
+
result = p.asfreq(freq, how=how)
|
| 788 |
+
assert result == expected
|
| 789 |
+
assert result.ordinal == expected.ordinal
|
| 790 |
+
assert result.freq == expected.freq
|
| 791 |
+
|
| 792 |
+
# combined freq to normal freq
|
| 793 |
+
p1 = Period(freq="1D1h", year=2007)
|
| 794 |
+
p2 = Period(freq="1h1D", year=2007)
|
| 795 |
+
|
| 796 |
+
# ordinal will change because how=E is the default
|
| 797 |
+
result1 = p1.asfreq("h")
|
| 798 |
+
result2 = p2.asfreq("h")
|
| 799 |
+
expected = Period("2007-01-02", freq="h")
|
| 800 |
+
assert result1 == expected
|
| 801 |
+
assert result1.ordinal == expected.ordinal
|
| 802 |
+
assert result1.freq == expected.freq
|
| 803 |
+
assert result2 == expected
|
| 804 |
+
assert result2.ordinal == expected.ordinal
|
| 805 |
+
assert result2.freq == expected.freq
|
| 806 |
+
|
| 807 |
+
# ordinal will not change
|
| 808 |
+
result1 = p1.asfreq("h", how="S")
|
| 809 |
+
result2 = p2.asfreq("h", how="S")
|
| 810 |
+
expected = Period("2007-01-01", freq="h")
|
| 811 |
+
assert result1 == expected
|
| 812 |
+
assert result1.ordinal == expected.ordinal
|
| 813 |
+
assert result1.freq == expected.freq
|
| 814 |
+
assert result2 == expected
|
| 815 |
+
assert result2.ordinal == expected.ordinal
|
| 816 |
+
assert result2.freq == expected.freq
|
| 817 |
+
|
| 818 |
+
def test_asfreq_MS(self):
|
| 819 |
+
initial = Period("2013")
|
| 820 |
+
|
| 821 |
+
assert initial.asfreq(freq="M", how="S") == Period("2013-01", "M")
|
| 822 |
+
|
| 823 |
+
msg = "MS is not supported as period frequency"
|
| 824 |
+
with pytest.raises(ValueError, match=msg):
|
| 825 |
+
initial.asfreq(freq="MS", how="S")
|
| 826 |
+
|
| 827 |
+
with pytest.raises(ValueError, match=msg):
|
| 828 |
+
Period("2013-01", "MS")
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/period/test_period.py
ADDED
|
@@ -0,0 +1,1154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import (
|
| 2 |
+
date,
|
| 3 |
+
datetime,
|
| 4 |
+
timedelta,
|
| 5 |
+
)
|
| 6 |
+
import re
|
| 7 |
+
|
| 8 |
+
import numpy as np
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from pandas._libs.tslibs import iNaT
|
| 12 |
+
from pandas._libs.tslibs.ccalendar import (
|
| 13 |
+
DAYS,
|
| 14 |
+
MONTHS,
|
| 15 |
+
)
|
| 16 |
+
from pandas._libs.tslibs.np_datetime import OutOfBoundsDatetime
|
| 17 |
+
from pandas._libs.tslibs.parsing import DateParseError
|
| 18 |
+
from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
|
| 19 |
+
|
| 20 |
+
from pandas import (
|
| 21 |
+
NaT,
|
| 22 |
+
Period,
|
| 23 |
+
Timedelta,
|
| 24 |
+
Timestamp,
|
| 25 |
+
offsets,
|
| 26 |
+
)
|
| 27 |
+
import pandas._testing as tm
|
| 28 |
+
|
| 29 |
+
bday_msg = "Period with BDay freq is deprecated"
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class TestPeriodDisallowedFreqs:
|
| 33 |
+
@pytest.mark.parametrize(
|
| 34 |
+
"freq, freq_msg",
|
| 35 |
+
[
|
| 36 |
+
(offsets.BYearBegin(), "BYearBegin"),
|
| 37 |
+
(offsets.YearBegin(2), "YearBegin"),
|
| 38 |
+
(offsets.QuarterBegin(startingMonth=12), "QuarterBegin"),
|
| 39 |
+
(offsets.BusinessMonthEnd(2), "BusinessMonthEnd"),
|
| 40 |
+
],
|
| 41 |
+
)
|
| 42 |
+
def test_offsets_not_supported(self, freq, freq_msg):
|
| 43 |
+
# GH#55785
|
| 44 |
+
msg = re.escape(f"{freq} is not supported as period frequency")
|
| 45 |
+
with pytest.raises(ValueError, match=msg):
|
| 46 |
+
Period(year=2014, freq=freq)
|
| 47 |
+
|
| 48 |
+
def test_custom_business_day_freq_raises(self):
|
| 49 |
+
# GH#52534
|
| 50 |
+
msg = "C is not supported as period frequency"
|
| 51 |
+
with pytest.raises(ValueError, match=msg):
|
| 52 |
+
Period("2023-04-10", freq="C")
|
| 53 |
+
msg = f"{offsets.CustomBusinessDay().base} is not supported as period frequency"
|
| 54 |
+
with pytest.raises(ValueError, match=msg):
|
| 55 |
+
Period("2023-04-10", freq=offsets.CustomBusinessDay())
|
| 56 |
+
|
| 57 |
+
def test_invalid_frequency_error_message(self):
|
| 58 |
+
msg = "WOM-1MON is not supported as period frequency"
|
| 59 |
+
with pytest.raises(ValueError, match=msg):
|
| 60 |
+
Period("2012-01-02", freq="WOM-1MON")
|
| 61 |
+
|
| 62 |
+
def test_invalid_frequency_period_error_message(self):
|
| 63 |
+
msg = "for Period, please use 'M' instead of 'ME'"
|
| 64 |
+
with pytest.raises(ValueError, match=msg):
|
| 65 |
+
Period("2012-01-02", freq="ME")
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class TestPeriodConstruction:
|
| 69 |
+
def test_from_td64nat_raises(self):
|
| 70 |
+
# GH#44507
|
| 71 |
+
td = NaT.to_numpy("m8[ns]")
|
| 72 |
+
|
| 73 |
+
msg = "Value must be Period, string, integer, or datetime"
|
| 74 |
+
with pytest.raises(ValueError, match=msg):
|
| 75 |
+
Period(td)
|
| 76 |
+
|
| 77 |
+
with pytest.raises(ValueError, match=msg):
|
| 78 |
+
Period(td, freq="D")
|
| 79 |
+
|
| 80 |
+
def test_construction(self):
|
| 81 |
+
i1 = Period("1/1/2005", freq="M")
|
| 82 |
+
i2 = Period("Jan 2005")
|
| 83 |
+
|
| 84 |
+
assert i1 == i2
|
| 85 |
+
|
| 86 |
+
# GH#54105 - Period can be confusingly instantiated with lowercase freq
|
| 87 |
+
# TODO: raise in the future an error when passing lowercase freq
|
| 88 |
+
i1 = Period("2005", freq="Y")
|
| 89 |
+
i2 = Period("2005")
|
| 90 |
+
|
| 91 |
+
assert i1 == i2
|
| 92 |
+
|
| 93 |
+
i4 = Period("2005", freq="M")
|
| 94 |
+
assert i1 != i4
|
| 95 |
+
|
| 96 |
+
i1 = Period.now(freq="Q")
|
| 97 |
+
i2 = Period(datetime.now(), freq="Q")
|
| 98 |
+
|
| 99 |
+
assert i1 == i2
|
| 100 |
+
|
| 101 |
+
# Pass in freq as a keyword argument sometimes as a test for
|
| 102 |
+
# https://github.com/pandas-dev/pandas/issues/53369
|
| 103 |
+
i1 = Period.now(freq="D")
|
| 104 |
+
i2 = Period(datetime.now(), freq="D")
|
| 105 |
+
i3 = Period.now(offsets.Day())
|
| 106 |
+
|
| 107 |
+
assert i1 == i2
|
| 108 |
+
assert i1 == i3
|
| 109 |
+
|
| 110 |
+
i1 = Period("1982", freq="min")
|
| 111 |
+
msg = "'MIN' is deprecated and will be removed in a future version."
|
| 112 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 113 |
+
i2 = Period("1982", freq="MIN")
|
| 114 |
+
assert i1 == i2
|
| 115 |
+
|
| 116 |
+
i1 = Period(year=2005, month=3, day=1, freq="D")
|
| 117 |
+
i2 = Period("3/1/2005", freq="D")
|
| 118 |
+
assert i1 == i2
|
| 119 |
+
|
| 120 |
+
i3 = Period(year=2005, month=3, day=1, freq="d")
|
| 121 |
+
assert i1 == i3
|
| 122 |
+
|
| 123 |
+
i1 = Period("2007-01-01 09:00:00.001")
|
| 124 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1000), freq="ms")
|
| 125 |
+
assert i1 == expected
|
| 126 |
+
|
| 127 |
+
expected = Period("2007-01-01 09:00:00.001", freq="ms")
|
| 128 |
+
assert i1 == expected
|
| 129 |
+
|
| 130 |
+
i1 = Period("2007-01-01 09:00:00.00101")
|
| 131 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1010), freq="us")
|
| 132 |
+
assert i1 == expected
|
| 133 |
+
|
| 134 |
+
expected = Period("2007-01-01 09:00:00.00101", freq="us")
|
| 135 |
+
assert i1 == expected
|
| 136 |
+
|
| 137 |
+
msg = "Must supply freq for ordinal value"
|
| 138 |
+
with pytest.raises(ValueError, match=msg):
|
| 139 |
+
Period(ordinal=200701)
|
| 140 |
+
|
| 141 |
+
msg = "Invalid frequency: X"
|
| 142 |
+
with pytest.raises(ValueError, match=msg):
|
| 143 |
+
Period("2007-1-1", freq="X")
|
| 144 |
+
|
| 145 |
+
def test_tuple_freq_disallowed(self):
|
| 146 |
+
# GH#34703 tuple freq disallowed
|
| 147 |
+
with pytest.raises(TypeError, match="pass as a string instead"):
|
| 148 |
+
Period("1982", freq=("Min", 1))
|
| 149 |
+
|
| 150 |
+
with pytest.raises(TypeError, match="pass as a string instead"):
|
| 151 |
+
Period("2006-12-31", ("w", 1))
|
| 152 |
+
|
| 153 |
+
def test_construction_from_timestamp_nanos(self):
|
| 154 |
+
# GH#46811 don't drop nanos from Timestamp
|
| 155 |
+
ts = Timestamp("2022-04-20 09:23:24.123456789")
|
| 156 |
+
per = Period(ts, freq="ns")
|
| 157 |
+
|
| 158 |
+
# should losslessly round-trip, not lose the 789
|
| 159 |
+
rt = per.to_timestamp()
|
| 160 |
+
assert rt == ts
|
| 161 |
+
|
| 162 |
+
# same thing but from a datetime64 object
|
| 163 |
+
dt64 = ts.asm8
|
| 164 |
+
per2 = Period(dt64, freq="ns")
|
| 165 |
+
rt2 = per2.to_timestamp()
|
| 166 |
+
assert rt2.asm8 == dt64
|
| 167 |
+
|
| 168 |
+
def test_construction_bday(self):
|
| 169 |
+
# Biz day construction, roll forward if non-weekday
|
| 170 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 171 |
+
i1 = Period("3/10/12", freq="B")
|
| 172 |
+
i2 = Period("3/10/12", freq="D")
|
| 173 |
+
assert i1 == i2.asfreq("B")
|
| 174 |
+
i2 = Period("3/11/12", freq="D")
|
| 175 |
+
assert i1 == i2.asfreq("B")
|
| 176 |
+
i2 = Period("3/12/12", freq="D")
|
| 177 |
+
assert i1 == i2.asfreq("B")
|
| 178 |
+
|
| 179 |
+
i3 = Period("3/10/12", freq="b")
|
| 180 |
+
assert i1 == i3
|
| 181 |
+
|
| 182 |
+
i1 = Period(year=2012, month=3, day=10, freq="B")
|
| 183 |
+
i2 = Period("3/12/12", freq="B")
|
| 184 |
+
assert i1 == i2
|
| 185 |
+
|
| 186 |
+
def test_construction_quarter(self):
|
| 187 |
+
i1 = Period(year=2005, quarter=1, freq="Q")
|
| 188 |
+
i2 = Period("1/1/2005", freq="Q")
|
| 189 |
+
assert i1 == i2
|
| 190 |
+
|
| 191 |
+
i1 = Period(year=2005, quarter=3, freq="Q")
|
| 192 |
+
i2 = Period("9/1/2005", freq="Q")
|
| 193 |
+
assert i1 == i2
|
| 194 |
+
|
| 195 |
+
i1 = Period("2005Q1")
|
| 196 |
+
i2 = Period(year=2005, quarter=1, freq="Q")
|
| 197 |
+
i3 = Period("2005q1")
|
| 198 |
+
assert i1 == i2
|
| 199 |
+
assert i1 == i3
|
| 200 |
+
|
| 201 |
+
i1 = Period("05Q1")
|
| 202 |
+
assert i1 == i2
|
| 203 |
+
lower = Period("05q1")
|
| 204 |
+
assert i1 == lower
|
| 205 |
+
|
| 206 |
+
i1 = Period("1Q2005")
|
| 207 |
+
assert i1 == i2
|
| 208 |
+
lower = Period("1q2005")
|
| 209 |
+
assert i1 == lower
|
| 210 |
+
|
| 211 |
+
i1 = Period("1Q05")
|
| 212 |
+
assert i1 == i2
|
| 213 |
+
lower = Period("1q05")
|
| 214 |
+
assert i1 == lower
|
| 215 |
+
|
| 216 |
+
i1 = Period("4Q1984")
|
| 217 |
+
assert i1.year == 1984
|
| 218 |
+
lower = Period("4q1984")
|
| 219 |
+
assert i1 == lower
|
| 220 |
+
|
| 221 |
+
def test_construction_month(self):
|
| 222 |
+
expected = Period("2007-01", freq="M")
|
| 223 |
+
i1 = Period("200701", freq="M")
|
| 224 |
+
assert i1 == expected
|
| 225 |
+
|
| 226 |
+
i1 = Period("200701", freq="M")
|
| 227 |
+
assert i1 == expected
|
| 228 |
+
|
| 229 |
+
i1 = Period(200701, freq="M")
|
| 230 |
+
assert i1 == expected
|
| 231 |
+
|
| 232 |
+
i1 = Period(ordinal=200701, freq="M")
|
| 233 |
+
assert i1.year == 18695
|
| 234 |
+
|
| 235 |
+
i1 = Period(datetime(2007, 1, 1), freq="M")
|
| 236 |
+
i2 = Period("200701", freq="M")
|
| 237 |
+
assert i1 == i2
|
| 238 |
+
|
| 239 |
+
i1 = Period(date(2007, 1, 1), freq="M")
|
| 240 |
+
i2 = Period(datetime(2007, 1, 1), freq="M")
|
| 241 |
+
i3 = Period(np.datetime64("2007-01-01"), freq="M")
|
| 242 |
+
i4 = Period("2007-01-01 00:00:00", freq="M")
|
| 243 |
+
i5 = Period("2007-01-01 00:00:00.000", freq="M")
|
| 244 |
+
assert i1 == i2
|
| 245 |
+
assert i1 == i3
|
| 246 |
+
assert i1 == i4
|
| 247 |
+
assert i1 == i5
|
| 248 |
+
|
| 249 |
+
def test_period_constructor_offsets(self):
|
| 250 |
+
assert Period("1/1/2005", freq=offsets.MonthEnd()) == Period(
|
| 251 |
+
"1/1/2005", freq="M"
|
| 252 |
+
)
|
| 253 |
+
assert Period("2005", freq=offsets.YearEnd()) == Period("2005", freq="Y")
|
| 254 |
+
assert Period("2005", freq=offsets.MonthEnd()) == Period("2005", freq="M")
|
| 255 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 256 |
+
assert Period("3/10/12", freq=offsets.BusinessDay()) == Period(
|
| 257 |
+
"3/10/12", freq="B"
|
| 258 |
+
)
|
| 259 |
+
assert Period("3/10/12", freq=offsets.Day()) == Period("3/10/12", freq="D")
|
| 260 |
+
|
| 261 |
+
assert Period(
|
| 262 |
+
year=2005, quarter=1, freq=offsets.QuarterEnd(startingMonth=12)
|
| 263 |
+
) == Period(year=2005, quarter=1, freq="Q")
|
| 264 |
+
assert Period(
|
| 265 |
+
year=2005, quarter=2, freq=offsets.QuarterEnd(startingMonth=12)
|
| 266 |
+
) == Period(year=2005, quarter=2, freq="Q")
|
| 267 |
+
|
| 268 |
+
assert Period(year=2005, month=3, day=1, freq=offsets.Day()) == Period(
|
| 269 |
+
year=2005, month=3, day=1, freq="D"
|
| 270 |
+
)
|
| 271 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 272 |
+
assert Period(year=2012, month=3, day=10, freq=offsets.BDay()) == Period(
|
| 273 |
+
year=2012, month=3, day=10, freq="B"
|
| 274 |
+
)
|
| 275 |
+
|
| 276 |
+
expected = Period("2005-03-01", freq="3D")
|
| 277 |
+
assert Period(year=2005, month=3, day=1, freq=offsets.Day(3)) == expected
|
| 278 |
+
assert Period(year=2005, month=3, day=1, freq="3D") == expected
|
| 279 |
+
|
| 280 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 281 |
+
assert Period(year=2012, month=3, day=10, freq=offsets.BDay(3)) == Period(
|
| 282 |
+
year=2012, month=3, day=10, freq="3B"
|
| 283 |
+
)
|
| 284 |
+
|
| 285 |
+
assert Period(200701, freq=offsets.MonthEnd()) == Period(200701, freq="M")
|
| 286 |
+
|
| 287 |
+
i1 = Period(ordinal=200701, freq=offsets.MonthEnd())
|
| 288 |
+
i2 = Period(ordinal=200701, freq="M")
|
| 289 |
+
assert i1 == i2
|
| 290 |
+
assert i1.year == 18695
|
| 291 |
+
assert i2.year == 18695
|
| 292 |
+
|
| 293 |
+
i1 = Period(datetime(2007, 1, 1), freq="M")
|
| 294 |
+
i2 = Period("200701", freq="M")
|
| 295 |
+
assert i1 == i2
|
| 296 |
+
|
| 297 |
+
i1 = Period(date(2007, 1, 1), freq="M")
|
| 298 |
+
i2 = Period(datetime(2007, 1, 1), freq="M")
|
| 299 |
+
i3 = Period(np.datetime64("2007-01-01"), freq="M")
|
| 300 |
+
i4 = Period("2007-01-01 00:00:00", freq="M")
|
| 301 |
+
i5 = Period("2007-01-01 00:00:00.000", freq="M")
|
| 302 |
+
assert i1 == i2
|
| 303 |
+
assert i1 == i3
|
| 304 |
+
assert i1 == i4
|
| 305 |
+
assert i1 == i5
|
| 306 |
+
|
| 307 |
+
i1 = Period("2007-01-01 09:00:00.001")
|
| 308 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1000), freq="ms")
|
| 309 |
+
assert i1 == expected
|
| 310 |
+
|
| 311 |
+
expected = Period("2007-01-01 09:00:00.001", freq="ms")
|
| 312 |
+
assert i1 == expected
|
| 313 |
+
|
| 314 |
+
i1 = Period("2007-01-01 09:00:00.00101")
|
| 315 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1010), freq="us")
|
| 316 |
+
assert i1 == expected
|
| 317 |
+
|
| 318 |
+
expected = Period("2007-01-01 09:00:00.00101", freq="us")
|
| 319 |
+
assert i1 == expected
|
| 320 |
+
|
| 321 |
+
def test_invalid_arguments(self):
|
| 322 |
+
msg = "Must supply freq for datetime value"
|
| 323 |
+
with pytest.raises(ValueError, match=msg):
|
| 324 |
+
Period(datetime.now())
|
| 325 |
+
with pytest.raises(ValueError, match=msg):
|
| 326 |
+
Period(datetime.now().date())
|
| 327 |
+
|
| 328 |
+
msg = "Value must be Period, string, integer, or datetime"
|
| 329 |
+
with pytest.raises(ValueError, match=msg):
|
| 330 |
+
Period(1.6, freq="D")
|
| 331 |
+
msg = "Ordinal must be an integer"
|
| 332 |
+
with pytest.raises(ValueError, match=msg):
|
| 333 |
+
Period(ordinal=1.6, freq="D")
|
| 334 |
+
msg = "Only value or ordinal but not both should be given but not both"
|
| 335 |
+
with pytest.raises(ValueError, match=msg):
|
| 336 |
+
Period(ordinal=2, value=1, freq="D")
|
| 337 |
+
|
| 338 |
+
msg = "If value is None, freq cannot be None"
|
| 339 |
+
with pytest.raises(ValueError, match=msg):
|
| 340 |
+
Period(month=1)
|
| 341 |
+
|
| 342 |
+
msg = '^Given date string "-2000" not likely a datetime$'
|
| 343 |
+
with pytest.raises(ValueError, match=msg):
|
| 344 |
+
Period("-2000", "Y")
|
| 345 |
+
msg = "day is out of range for month"
|
| 346 |
+
with pytest.raises(DateParseError, match=msg):
|
| 347 |
+
Period("0", "Y")
|
| 348 |
+
msg = "Unknown datetime string format, unable to parse"
|
| 349 |
+
with pytest.raises(DateParseError, match=msg):
|
| 350 |
+
Period("1/1/-2000", "Y")
|
| 351 |
+
|
| 352 |
+
def test_constructor_corner(self):
|
| 353 |
+
expected = Period("2007-01", freq="2M")
|
| 354 |
+
assert Period(year=2007, month=1, freq="2M") == expected
|
| 355 |
+
|
| 356 |
+
assert Period(None) is NaT
|
| 357 |
+
|
| 358 |
+
p = Period("2007-01-01", freq="D")
|
| 359 |
+
|
| 360 |
+
result = Period(p, freq="Y")
|
| 361 |
+
exp = Period("2007", freq="Y")
|
| 362 |
+
assert result == exp
|
| 363 |
+
|
| 364 |
+
def test_constructor_infer_freq(self):
|
| 365 |
+
p = Period("2007-01-01")
|
| 366 |
+
assert p.freq == "D"
|
| 367 |
+
|
| 368 |
+
p = Period("2007-01-01 07")
|
| 369 |
+
assert p.freq == "h"
|
| 370 |
+
|
| 371 |
+
p = Period("2007-01-01 07:10")
|
| 372 |
+
assert p.freq == "min"
|
| 373 |
+
|
| 374 |
+
p = Period("2007-01-01 07:10:15")
|
| 375 |
+
assert p.freq == "s"
|
| 376 |
+
|
| 377 |
+
p = Period("2007-01-01 07:10:15.123")
|
| 378 |
+
assert p.freq == "ms"
|
| 379 |
+
|
| 380 |
+
# We see that there are 6 digits after the decimal, so get microsecond
|
| 381 |
+
# even though they are all zeros.
|
| 382 |
+
p = Period("2007-01-01 07:10:15.123000")
|
| 383 |
+
assert p.freq == "us"
|
| 384 |
+
|
| 385 |
+
p = Period("2007-01-01 07:10:15.123400")
|
| 386 |
+
assert p.freq == "us"
|
| 387 |
+
|
| 388 |
+
def test_multiples(self):
|
| 389 |
+
result1 = Period("1989", freq="2Y")
|
| 390 |
+
result2 = Period("1989", freq="Y")
|
| 391 |
+
assert result1.ordinal == result2.ordinal
|
| 392 |
+
assert result1.freqstr == "2Y-DEC"
|
| 393 |
+
assert result2.freqstr == "Y-DEC"
|
| 394 |
+
assert result1.freq == offsets.YearEnd(2)
|
| 395 |
+
assert result2.freq == offsets.YearEnd()
|
| 396 |
+
|
| 397 |
+
assert (result1 + 1).ordinal == result1.ordinal + 2
|
| 398 |
+
assert (1 + result1).ordinal == result1.ordinal + 2
|
| 399 |
+
assert (result1 - 1).ordinal == result2.ordinal - 2
|
| 400 |
+
assert (-1 + result1).ordinal == result2.ordinal - 2
|
| 401 |
+
|
| 402 |
+
@pytest.mark.parametrize("month", MONTHS)
|
| 403 |
+
def test_period_cons_quarterly(self, month):
|
| 404 |
+
# bugs in scikits.timeseries
|
| 405 |
+
freq = f"Q-{month}"
|
| 406 |
+
exp = Period("1989Q3", freq=freq)
|
| 407 |
+
assert "1989Q3" in str(exp)
|
| 408 |
+
stamp = exp.to_timestamp("D", how="end")
|
| 409 |
+
p = Period(stamp, freq=freq)
|
| 410 |
+
assert p == exp
|
| 411 |
+
|
| 412 |
+
stamp = exp.to_timestamp("3D", how="end")
|
| 413 |
+
p = Period(stamp, freq=freq)
|
| 414 |
+
assert p == exp
|
| 415 |
+
|
| 416 |
+
@pytest.mark.parametrize("month", MONTHS)
|
| 417 |
+
def test_period_cons_annual(self, month):
|
| 418 |
+
# bugs in scikits.timeseries
|
| 419 |
+
freq = f"Y-{month}"
|
| 420 |
+
exp = Period("1989", freq=freq)
|
| 421 |
+
stamp = exp.to_timestamp("D", how="end") + timedelta(days=30)
|
| 422 |
+
p = Period(stamp, freq=freq)
|
| 423 |
+
|
| 424 |
+
assert p == exp + 1
|
| 425 |
+
assert isinstance(p, Period)
|
| 426 |
+
|
| 427 |
+
@pytest.mark.parametrize("day", DAYS)
|
| 428 |
+
@pytest.mark.parametrize("num", range(10, 17))
|
| 429 |
+
def test_period_cons_weekly(self, num, day):
|
| 430 |
+
daystr = f"2011-02-{num}"
|
| 431 |
+
freq = f"W-{day}"
|
| 432 |
+
|
| 433 |
+
result = Period(daystr, freq=freq)
|
| 434 |
+
expected = Period(daystr, freq="D").asfreq(freq)
|
| 435 |
+
assert result == expected
|
| 436 |
+
assert isinstance(result, Period)
|
| 437 |
+
|
| 438 |
+
def test_parse_week_str_roundstrip(self):
|
| 439 |
+
# GH#50803
|
| 440 |
+
per = Period("2017-01-23/2017-01-29")
|
| 441 |
+
assert per.freq.freqstr == "W-SUN"
|
| 442 |
+
|
| 443 |
+
per = Period("2017-01-24/2017-01-30")
|
| 444 |
+
assert per.freq.freqstr == "W-MON"
|
| 445 |
+
|
| 446 |
+
msg = "Could not parse as weekly-freq Period"
|
| 447 |
+
with pytest.raises(ValueError, match=msg):
|
| 448 |
+
# not 6 days apart
|
| 449 |
+
Period("2016-01-23/2017-01-29")
|
| 450 |
+
|
| 451 |
+
def test_period_from_ordinal(self):
|
| 452 |
+
p = Period("2011-01", freq="M")
|
| 453 |
+
res = Period._from_ordinal(p.ordinal, freq=p.freq)
|
| 454 |
+
assert p == res
|
| 455 |
+
assert isinstance(res, Period)
|
| 456 |
+
|
| 457 |
+
@pytest.mark.parametrize("freq", ["Y", "M", "D", "h"])
|
| 458 |
+
def test_construct_from_nat_string_and_freq(self, freq):
|
| 459 |
+
per = Period("NaT", freq=freq)
|
| 460 |
+
assert per is NaT
|
| 461 |
+
|
| 462 |
+
per = Period("NaT", freq="2" + freq)
|
| 463 |
+
assert per is NaT
|
| 464 |
+
|
| 465 |
+
per = Period("NaT", freq="3" + freq)
|
| 466 |
+
assert per is NaT
|
| 467 |
+
|
| 468 |
+
def test_period_cons_nat(self):
|
| 469 |
+
p = Period("nat", freq="W-SUN")
|
| 470 |
+
assert p is NaT
|
| 471 |
+
|
| 472 |
+
p = Period(iNaT, freq="D")
|
| 473 |
+
assert p is NaT
|
| 474 |
+
|
| 475 |
+
p = Period(iNaT, freq="3D")
|
| 476 |
+
assert p is NaT
|
| 477 |
+
|
| 478 |
+
p = Period(iNaT, freq="1D1h")
|
| 479 |
+
assert p is NaT
|
| 480 |
+
|
| 481 |
+
p = Period("NaT")
|
| 482 |
+
assert p is NaT
|
| 483 |
+
|
| 484 |
+
p = Period(iNaT)
|
| 485 |
+
assert p is NaT
|
| 486 |
+
|
| 487 |
+
def test_period_cons_mult(self):
|
| 488 |
+
p1 = Period("2011-01", freq="3M")
|
| 489 |
+
p2 = Period("2011-01", freq="M")
|
| 490 |
+
assert p1.ordinal == p2.ordinal
|
| 491 |
+
|
| 492 |
+
assert p1.freq == offsets.MonthEnd(3)
|
| 493 |
+
assert p1.freqstr == "3M"
|
| 494 |
+
|
| 495 |
+
assert p2.freq == offsets.MonthEnd()
|
| 496 |
+
assert p2.freqstr == "M"
|
| 497 |
+
|
| 498 |
+
result = p1 + 1
|
| 499 |
+
assert result.ordinal == (p2 + 3).ordinal
|
| 500 |
+
|
| 501 |
+
assert result.freq == p1.freq
|
| 502 |
+
assert result.freqstr == "3M"
|
| 503 |
+
|
| 504 |
+
result = p1 - 1
|
| 505 |
+
assert result.ordinal == (p2 - 3).ordinal
|
| 506 |
+
assert result.freq == p1.freq
|
| 507 |
+
assert result.freqstr == "3M"
|
| 508 |
+
|
| 509 |
+
msg = "Frequency must be positive, because it represents span: -3M"
|
| 510 |
+
with pytest.raises(ValueError, match=msg):
|
| 511 |
+
Period("2011-01", freq="-3M")
|
| 512 |
+
|
| 513 |
+
msg = "Frequency must be positive, because it represents span: 0M"
|
| 514 |
+
with pytest.raises(ValueError, match=msg):
|
| 515 |
+
Period("2011-01", freq="0M")
|
| 516 |
+
|
| 517 |
+
def test_period_cons_combined(self):
|
| 518 |
+
p = [
|
| 519 |
+
(
|
| 520 |
+
Period("2011-01", freq="1D1h"),
|
| 521 |
+
Period("2011-01", freq="1h1D"),
|
| 522 |
+
Period("2011-01", freq="h"),
|
| 523 |
+
),
|
| 524 |
+
(
|
| 525 |
+
Period(ordinal=1, freq="1D1h"),
|
| 526 |
+
Period(ordinal=1, freq="1h1D"),
|
| 527 |
+
Period(ordinal=1, freq="h"),
|
| 528 |
+
),
|
| 529 |
+
]
|
| 530 |
+
|
| 531 |
+
for p1, p2, p3 in p:
|
| 532 |
+
assert p1.ordinal == p3.ordinal
|
| 533 |
+
assert p2.ordinal == p3.ordinal
|
| 534 |
+
|
| 535 |
+
assert p1.freq == offsets.Hour(25)
|
| 536 |
+
assert p1.freqstr == "25h"
|
| 537 |
+
|
| 538 |
+
assert p2.freq == offsets.Hour(25)
|
| 539 |
+
assert p2.freqstr == "25h"
|
| 540 |
+
|
| 541 |
+
assert p3.freq == offsets.Hour()
|
| 542 |
+
assert p3.freqstr == "h"
|
| 543 |
+
|
| 544 |
+
result = p1 + 1
|
| 545 |
+
assert result.ordinal == (p3 + 25).ordinal
|
| 546 |
+
assert result.freq == p1.freq
|
| 547 |
+
assert result.freqstr == "25h"
|
| 548 |
+
|
| 549 |
+
result = p2 + 1
|
| 550 |
+
assert result.ordinal == (p3 + 25).ordinal
|
| 551 |
+
assert result.freq == p2.freq
|
| 552 |
+
assert result.freqstr == "25h"
|
| 553 |
+
|
| 554 |
+
result = p1 - 1
|
| 555 |
+
assert result.ordinal == (p3 - 25).ordinal
|
| 556 |
+
assert result.freq == p1.freq
|
| 557 |
+
assert result.freqstr == "25h"
|
| 558 |
+
|
| 559 |
+
result = p2 - 1
|
| 560 |
+
assert result.ordinal == (p3 - 25).ordinal
|
| 561 |
+
assert result.freq == p2.freq
|
| 562 |
+
assert result.freqstr == "25h"
|
| 563 |
+
|
| 564 |
+
msg = "Frequency must be positive, because it represents span: -25h"
|
| 565 |
+
with pytest.raises(ValueError, match=msg):
|
| 566 |
+
Period("2011-01", freq="-1D1h")
|
| 567 |
+
with pytest.raises(ValueError, match=msg):
|
| 568 |
+
Period("2011-01", freq="-1h1D")
|
| 569 |
+
with pytest.raises(ValueError, match=msg):
|
| 570 |
+
Period(ordinal=1, freq="-1D1h")
|
| 571 |
+
with pytest.raises(ValueError, match=msg):
|
| 572 |
+
Period(ordinal=1, freq="-1h1D")
|
| 573 |
+
|
| 574 |
+
msg = "Frequency must be positive, because it represents span: 0D"
|
| 575 |
+
with pytest.raises(ValueError, match=msg):
|
| 576 |
+
Period("2011-01", freq="0D0h")
|
| 577 |
+
with pytest.raises(ValueError, match=msg):
|
| 578 |
+
Period(ordinal=1, freq="0D0h")
|
| 579 |
+
|
| 580 |
+
# You can only combine together day and intraday offsets
|
| 581 |
+
msg = "Invalid frequency: 1W1D"
|
| 582 |
+
with pytest.raises(ValueError, match=msg):
|
| 583 |
+
Period("2011-01", freq="1W1D")
|
| 584 |
+
msg = "Invalid frequency: 1D1W"
|
| 585 |
+
with pytest.raises(ValueError, match=msg):
|
| 586 |
+
Period("2011-01", freq="1D1W")
|
| 587 |
+
|
| 588 |
+
@pytest.mark.parametrize("day", ["1970/01/01 ", "2020-12-31 ", "1981/09/13 "])
|
| 589 |
+
@pytest.mark.parametrize("hour", ["00:00:00", "00:00:01", "23:59:59", "12:00:59"])
|
| 590 |
+
@pytest.mark.parametrize(
|
| 591 |
+
"sec_float, expected",
|
| 592 |
+
[
|
| 593 |
+
(".000000001", 1),
|
| 594 |
+
(".000000999", 999),
|
| 595 |
+
(".123456789", 789),
|
| 596 |
+
(".999999999", 999),
|
| 597 |
+
(".999999000", 0),
|
| 598 |
+
# Test femtoseconds, attoseconds, picoseconds are dropped like Timestamp
|
| 599 |
+
(".999999001123", 1),
|
| 600 |
+
(".999999001123456", 1),
|
| 601 |
+
(".999999001123456789", 1),
|
| 602 |
+
],
|
| 603 |
+
)
|
| 604 |
+
def test_period_constructor_nanosecond(self, day, hour, sec_float, expected):
|
| 605 |
+
# GH 34621
|
| 606 |
+
|
| 607 |
+
assert Period(day + hour + sec_float).start_time.nanosecond == expected
|
| 608 |
+
|
| 609 |
+
@pytest.mark.parametrize("hour", range(24))
|
| 610 |
+
def test_period_large_ordinal(self, hour):
|
| 611 |
+
# Issue #36430
|
| 612 |
+
# Integer overflow for Period over the maximum timestamp
|
| 613 |
+
p = Period(ordinal=2562048 + hour, freq="1h")
|
| 614 |
+
assert p.hour == hour
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
class TestPeriodMethods:
|
| 618 |
+
def test_round_trip(self):
|
| 619 |
+
p = Period("2000Q1")
|
| 620 |
+
new_p = tm.round_trip_pickle(p)
|
| 621 |
+
assert new_p == p
|
| 622 |
+
|
| 623 |
+
def test_hash(self):
|
| 624 |
+
assert hash(Period("2011-01", freq="M")) == hash(Period("2011-01", freq="M"))
|
| 625 |
+
|
| 626 |
+
assert hash(Period("2011-01-01", freq="D")) != hash(Period("2011-01", freq="M"))
|
| 627 |
+
|
| 628 |
+
assert hash(Period("2011-01", freq="3M")) != hash(Period("2011-01", freq="2M"))
|
| 629 |
+
|
| 630 |
+
assert hash(Period("2011-01", freq="M")) != hash(Period("2011-02", freq="M"))
|
| 631 |
+
|
| 632 |
+
# --------------------------------------------------------------
|
| 633 |
+
# to_timestamp
|
| 634 |
+
|
| 635 |
+
def test_to_timestamp_mult(self):
|
| 636 |
+
p = Period("2011-01", freq="M")
|
| 637 |
+
assert p.to_timestamp(how="S") == Timestamp("2011-01-01")
|
| 638 |
+
expected = Timestamp("2011-02-01") - Timedelta(1, "ns")
|
| 639 |
+
assert p.to_timestamp(how="E") == expected
|
| 640 |
+
|
| 641 |
+
p = Period("2011-01", freq="3M")
|
| 642 |
+
assert p.to_timestamp(how="S") == Timestamp("2011-01-01")
|
| 643 |
+
expected = Timestamp("2011-04-01") - Timedelta(1, "ns")
|
| 644 |
+
assert p.to_timestamp(how="E") == expected
|
| 645 |
+
|
| 646 |
+
@pytest.mark.filterwarnings(
|
| 647 |
+
"ignore:Period with BDay freq is deprecated:FutureWarning"
|
| 648 |
+
)
|
| 649 |
+
def test_to_timestamp(self):
|
| 650 |
+
p = Period("1982", freq="Y")
|
| 651 |
+
start_ts = p.to_timestamp(how="S")
|
| 652 |
+
aliases = ["s", "StarT", "BEGIn"]
|
| 653 |
+
for a in aliases:
|
| 654 |
+
assert start_ts == p.to_timestamp("D", how=a)
|
| 655 |
+
# freq with mult should not affect to the result
|
| 656 |
+
assert start_ts == p.to_timestamp("3D", how=a)
|
| 657 |
+
|
| 658 |
+
end_ts = p.to_timestamp(how="E")
|
| 659 |
+
aliases = ["e", "end", "FINIsH"]
|
| 660 |
+
for a in aliases:
|
| 661 |
+
assert end_ts == p.to_timestamp("D", how=a)
|
| 662 |
+
assert end_ts == p.to_timestamp("3D", how=a)
|
| 663 |
+
|
| 664 |
+
from_lst = ["Y", "Q", "M", "W", "B", "D", "h", "Min", "s"]
|
| 665 |
+
|
| 666 |
+
def _ex(p):
|
| 667 |
+
if p.freq == "B":
|
| 668 |
+
return p.start_time + Timedelta(days=1, nanoseconds=-1)
|
| 669 |
+
return Timestamp((p + p.freq).start_time._value - 1)
|
| 670 |
+
|
| 671 |
+
for fcode in from_lst:
|
| 672 |
+
p = Period("1982", freq=fcode)
|
| 673 |
+
result = p.to_timestamp().to_period(fcode)
|
| 674 |
+
assert result == p
|
| 675 |
+
|
| 676 |
+
assert p.start_time == p.to_timestamp(how="S")
|
| 677 |
+
|
| 678 |
+
assert p.end_time == _ex(p)
|
| 679 |
+
|
| 680 |
+
# Frequency other than daily
|
| 681 |
+
|
| 682 |
+
p = Period("1985", freq="Y")
|
| 683 |
+
|
| 684 |
+
result = p.to_timestamp("h", how="end")
|
| 685 |
+
expected = Timestamp(1986, 1, 1) - Timedelta(1, "ns")
|
| 686 |
+
assert result == expected
|
| 687 |
+
result = p.to_timestamp("3h", how="end")
|
| 688 |
+
assert result == expected
|
| 689 |
+
|
| 690 |
+
result = p.to_timestamp("min", how="end")
|
| 691 |
+
expected = Timestamp(1986, 1, 1) - Timedelta(1, "ns")
|
| 692 |
+
assert result == expected
|
| 693 |
+
result = p.to_timestamp("2min", how="end")
|
| 694 |
+
assert result == expected
|
| 695 |
+
|
| 696 |
+
result = p.to_timestamp(how="end")
|
| 697 |
+
expected = Timestamp(1986, 1, 1) - Timedelta(1, "ns")
|
| 698 |
+
assert result == expected
|
| 699 |
+
|
| 700 |
+
expected = datetime(1985, 1, 1)
|
| 701 |
+
result = p.to_timestamp("h", how="start")
|
| 702 |
+
assert result == expected
|
| 703 |
+
result = p.to_timestamp("min", how="start")
|
| 704 |
+
assert result == expected
|
| 705 |
+
result = p.to_timestamp("s", how="start")
|
| 706 |
+
assert result == expected
|
| 707 |
+
result = p.to_timestamp("3h", how="start")
|
| 708 |
+
assert result == expected
|
| 709 |
+
result = p.to_timestamp("5s", how="start")
|
| 710 |
+
assert result == expected
|
| 711 |
+
|
| 712 |
+
def test_to_timestamp_business_end(self):
|
| 713 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 714 |
+
per = Period("1990-01-05", "B") # Friday
|
| 715 |
+
result = per.to_timestamp("B", how="E")
|
| 716 |
+
|
| 717 |
+
expected = Timestamp("1990-01-06") - Timedelta(nanoseconds=1)
|
| 718 |
+
assert result == expected
|
| 719 |
+
|
| 720 |
+
@pytest.mark.parametrize(
|
| 721 |
+
"ts, expected",
|
| 722 |
+
[
|
| 723 |
+
("1970-01-01 00:00:00", 0),
|
| 724 |
+
("1970-01-01 00:00:00.000001", 1),
|
| 725 |
+
("1970-01-01 00:00:00.00001", 10),
|
| 726 |
+
("1970-01-01 00:00:00.499", 499000),
|
| 727 |
+
("1999-12-31 23:59:59.999", 999000),
|
| 728 |
+
("1999-12-31 23:59:59.999999", 999999),
|
| 729 |
+
("2050-12-31 23:59:59.5", 500000),
|
| 730 |
+
("2050-12-31 23:59:59.500001", 500001),
|
| 731 |
+
("2050-12-31 23:59:59.123456", 123456),
|
| 732 |
+
],
|
| 733 |
+
)
|
| 734 |
+
@pytest.mark.parametrize("freq", [None, "us", "ns"])
|
| 735 |
+
def test_to_timestamp_microsecond(self, ts, expected, freq):
|
| 736 |
+
# GH 24444
|
| 737 |
+
result = Period(ts).to_timestamp(freq=freq).microsecond
|
| 738 |
+
assert result == expected
|
| 739 |
+
|
| 740 |
+
# --------------------------------------------------------------
|
| 741 |
+
# Rendering: __repr__, strftime, etc
|
| 742 |
+
|
| 743 |
+
@pytest.mark.parametrize(
|
| 744 |
+
"str_ts,freq,str_res,str_freq",
|
| 745 |
+
(
|
| 746 |
+
("Jan-2000", None, "2000-01", "M"),
|
| 747 |
+
("2000-12-15", None, "2000-12-15", "D"),
|
| 748 |
+
(
|
| 749 |
+
"2000-12-15 13:45:26.123456789",
|
| 750 |
+
"ns",
|
| 751 |
+
"2000-12-15 13:45:26.123456789",
|
| 752 |
+
"ns",
|
| 753 |
+
),
|
| 754 |
+
("2000-12-15 13:45:26.123456789", "us", "2000-12-15 13:45:26.123456", "us"),
|
| 755 |
+
("2000-12-15 13:45:26.123456", None, "2000-12-15 13:45:26.123456", "us"),
|
| 756 |
+
("2000-12-15 13:45:26.123456789", "ms", "2000-12-15 13:45:26.123", "ms"),
|
| 757 |
+
("2000-12-15 13:45:26.123", None, "2000-12-15 13:45:26.123", "ms"),
|
| 758 |
+
("2000-12-15 13:45:26", "s", "2000-12-15 13:45:26", "s"),
|
| 759 |
+
("2000-12-15 13:45:26", "min", "2000-12-15 13:45", "min"),
|
| 760 |
+
("2000-12-15 13:45:26", "h", "2000-12-15 13:00", "h"),
|
| 761 |
+
("2000-12-15", "Y", "2000", "Y-DEC"),
|
| 762 |
+
("2000-12-15", "Q", "2000Q4", "Q-DEC"),
|
| 763 |
+
("2000-12-15", "M", "2000-12", "M"),
|
| 764 |
+
("2000-12-15", "W", "2000-12-11/2000-12-17", "W-SUN"),
|
| 765 |
+
("2000-12-15", "D", "2000-12-15", "D"),
|
| 766 |
+
("2000-12-15", "B", "2000-12-15", "B"),
|
| 767 |
+
),
|
| 768 |
+
)
|
| 769 |
+
@pytest.mark.filterwarnings(
|
| 770 |
+
"ignore:Period with BDay freq is deprecated:FutureWarning"
|
| 771 |
+
)
|
| 772 |
+
def test_repr(self, str_ts, freq, str_res, str_freq):
|
| 773 |
+
p = Period(str_ts, freq=freq)
|
| 774 |
+
assert str(p) == str_res
|
| 775 |
+
assert repr(p) == f"Period('{str_res}', '{str_freq}')"
|
| 776 |
+
|
| 777 |
+
def test_repr_nat(self):
|
| 778 |
+
p = Period("nat", freq="M")
|
| 779 |
+
assert repr(NaT) in repr(p)
|
| 780 |
+
|
| 781 |
+
def test_strftime(self):
|
| 782 |
+
# GH#3363
|
| 783 |
+
p = Period("2000-1-1 12:34:12", freq="s")
|
| 784 |
+
res = p.strftime("%Y-%m-%d %H:%M:%S")
|
| 785 |
+
assert res == "2000-01-01 12:34:12"
|
| 786 |
+
assert isinstance(res, str)
|
| 787 |
+
|
| 788 |
+
|
| 789 |
+
class TestPeriodProperties:
|
| 790 |
+
"""Test properties such as year, month, weekday, etc...."""
|
| 791 |
+
|
| 792 |
+
@pytest.mark.parametrize("freq", ["Y", "M", "D", "h"])
|
| 793 |
+
def test_is_leap_year(self, freq):
|
| 794 |
+
# GH 13727
|
| 795 |
+
p = Period("2000-01-01 00:00:00", freq=freq)
|
| 796 |
+
assert p.is_leap_year
|
| 797 |
+
assert isinstance(p.is_leap_year, bool)
|
| 798 |
+
|
| 799 |
+
p = Period("1999-01-01 00:00:00", freq=freq)
|
| 800 |
+
assert not p.is_leap_year
|
| 801 |
+
|
| 802 |
+
p = Period("2004-01-01 00:00:00", freq=freq)
|
| 803 |
+
assert p.is_leap_year
|
| 804 |
+
|
| 805 |
+
p = Period("2100-01-01 00:00:00", freq=freq)
|
| 806 |
+
assert not p.is_leap_year
|
| 807 |
+
|
| 808 |
+
def test_quarterly_negative_ordinals(self):
|
| 809 |
+
p = Period(ordinal=-1, freq="Q-DEC")
|
| 810 |
+
assert p.year == 1969
|
| 811 |
+
assert p.quarter == 4
|
| 812 |
+
assert isinstance(p, Period)
|
| 813 |
+
|
| 814 |
+
p = Period(ordinal=-2, freq="Q-DEC")
|
| 815 |
+
assert p.year == 1969
|
| 816 |
+
assert p.quarter == 3
|
| 817 |
+
assert isinstance(p, Period)
|
| 818 |
+
|
| 819 |
+
p = Period(ordinal=-2, freq="M")
|
| 820 |
+
assert p.year == 1969
|
| 821 |
+
assert p.month == 11
|
| 822 |
+
assert isinstance(p, Period)
|
| 823 |
+
|
| 824 |
+
def test_freq_str(self):
|
| 825 |
+
i1 = Period("1982", freq="Min")
|
| 826 |
+
assert i1.freq == offsets.Minute()
|
| 827 |
+
assert i1.freqstr == "min"
|
| 828 |
+
|
| 829 |
+
@pytest.mark.filterwarnings(
|
| 830 |
+
"ignore:Period with BDay freq is deprecated:FutureWarning"
|
| 831 |
+
)
|
| 832 |
+
def test_period_deprecated_freq(self):
|
| 833 |
+
cases = {
|
| 834 |
+
"M": ["MTH", "MONTH", "MONTHLY", "Mth", "month", "monthly"],
|
| 835 |
+
"B": ["BUS", "BUSINESS", "BUSINESSLY", "WEEKDAY", "bus"],
|
| 836 |
+
"D": ["DAY", "DLY", "DAILY", "Day", "Dly", "Daily"],
|
| 837 |
+
"h": ["HR", "HOUR", "HRLY", "HOURLY", "hr", "Hour", "HRly"],
|
| 838 |
+
"min": ["minute", "MINUTE", "MINUTELY", "minutely"],
|
| 839 |
+
"s": ["sec", "SEC", "SECOND", "SECONDLY", "second"],
|
| 840 |
+
"ms": ["MILLISECOND", "MILLISECONDLY", "millisecond"],
|
| 841 |
+
"us": ["MICROSECOND", "MICROSECONDLY", "microsecond"],
|
| 842 |
+
"ns": ["NANOSECOND", "NANOSECONDLY", "nanosecond"],
|
| 843 |
+
}
|
| 844 |
+
|
| 845 |
+
msg = INVALID_FREQ_ERR_MSG
|
| 846 |
+
for exp, freqs in cases.items():
|
| 847 |
+
for freq in freqs:
|
| 848 |
+
with pytest.raises(ValueError, match=msg):
|
| 849 |
+
Period("2016-03-01 09:00", freq=freq)
|
| 850 |
+
with pytest.raises(ValueError, match=msg):
|
| 851 |
+
Period(ordinal=1, freq=freq)
|
| 852 |
+
|
| 853 |
+
# check supported freq-aliases still works
|
| 854 |
+
p1 = Period("2016-03-01 09:00", freq=exp)
|
| 855 |
+
p2 = Period(ordinal=1, freq=exp)
|
| 856 |
+
assert isinstance(p1, Period)
|
| 857 |
+
assert isinstance(p2, Period)
|
| 858 |
+
|
| 859 |
+
@staticmethod
|
| 860 |
+
def _period_constructor(bound, offset):
|
| 861 |
+
return Period(
|
| 862 |
+
year=bound.year,
|
| 863 |
+
month=bound.month,
|
| 864 |
+
day=bound.day,
|
| 865 |
+
hour=bound.hour,
|
| 866 |
+
minute=bound.minute,
|
| 867 |
+
second=bound.second + offset,
|
| 868 |
+
freq="us",
|
| 869 |
+
)
|
| 870 |
+
|
| 871 |
+
@pytest.mark.parametrize("bound, offset", [(Timestamp.min, -1), (Timestamp.max, 1)])
|
| 872 |
+
@pytest.mark.parametrize("period_property", ["start_time", "end_time"])
|
| 873 |
+
def test_outer_bounds_start_and_end_time(self, bound, offset, period_property):
|
| 874 |
+
# GH #13346
|
| 875 |
+
period = TestPeriodProperties._period_constructor(bound, offset)
|
| 876 |
+
with pytest.raises(OutOfBoundsDatetime, match="Out of bounds nanosecond"):
|
| 877 |
+
getattr(period, period_property)
|
| 878 |
+
|
| 879 |
+
@pytest.mark.parametrize("bound, offset", [(Timestamp.min, -1), (Timestamp.max, 1)])
|
| 880 |
+
@pytest.mark.parametrize("period_property", ["start_time", "end_time"])
|
| 881 |
+
def test_inner_bounds_start_and_end_time(self, bound, offset, period_property):
|
| 882 |
+
# GH #13346
|
| 883 |
+
period = TestPeriodProperties._period_constructor(bound, -offset)
|
| 884 |
+
expected = period.to_timestamp().round(freq="s")
|
| 885 |
+
assert getattr(period, period_property).round(freq="s") == expected
|
| 886 |
+
expected = (bound - offset * Timedelta(1, unit="s")).floor("s")
|
| 887 |
+
assert getattr(period, period_property).floor("s") == expected
|
| 888 |
+
|
| 889 |
+
def test_start_time(self):
|
| 890 |
+
freq_lst = ["Y", "Q", "M", "D", "h", "min", "s"]
|
| 891 |
+
xp = datetime(2012, 1, 1)
|
| 892 |
+
for f in freq_lst:
|
| 893 |
+
p = Period("2012", freq=f)
|
| 894 |
+
assert p.start_time == xp
|
| 895 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 896 |
+
assert Period("2012", freq="B").start_time == datetime(2012, 1, 2)
|
| 897 |
+
assert Period("2012", freq="W").start_time == datetime(2011, 12, 26)
|
| 898 |
+
|
| 899 |
+
def test_end_time(self):
|
| 900 |
+
p = Period("2012", freq="Y")
|
| 901 |
+
|
| 902 |
+
def _ex(*args):
|
| 903 |
+
return Timestamp(Timestamp(datetime(*args)).as_unit("ns")._value - 1)
|
| 904 |
+
|
| 905 |
+
xp = _ex(2013, 1, 1)
|
| 906 |
+
assert xp == p.end_time
|
| 907 |
+
|
| 908 |
+
p = Period("2012", freq="Q")
|
| 909 |
+
xp = _ex(2012, 4, 1)
|
| 910 |
+
assert xp == p.end_time
|
| 911 |
+
|
| 912 |
+
p = Period("2012", freq="M")
|
| 913 |
+
xp = _ex(2012, 2, 1)
|
| 914 |
+
assert xp == p.end_time
|
| 915 |
+
|
| 916 |
+
p = Period("2012", freq="D")
|
| 917 |
+
xp = _ex(2012, 1, 2)
|
| 918 |
+
assert xp == p.end_time
|
| 919 |
+
|
| 920 |
+
p = Period("2012", freq="h")
|
| 921 |
+
xp = _ex(2012, 1, 1, 1)
|
| 922 |
+
assert xp == p.end_time
|
| 923 |
+
|
| 924 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 925 |
+
p = Period("2012", freq="B")
|
| 926 |
+
xp = _ex(2012, 1, 3)
|
| 927 |
+
assert xp == p.end_time
|
| 928 |
+
|
| 929 |
+
p = Period("2012", freq="W")
|
| 930 |
+
xp = _ex(2012, 1, 2)
|
| 931 |
+
assert xp == p.end_time
|
| 932 |
+
|
| 933 |
+
# Test for GH 11738
|
| 934 |
+
p = Period("2012", freq="15D")
|
| 935 |
+
xp = _ex(2012, 1, 16)
|
| 936 |
+
assert xp == p.end_time
|
| 937 |
+
|
| 938 |
+
p = Period("2012", freq="1D1h")
|
| 939 |
+
xp = _ex(2012, 1, 2, 1)
|
| 940 |
+
assert xp == p.end_time
|
| 941 |
+
|
| 942 |
+
p = Period("2012", freq="1h1D")
|
| 943 |
+
xp = _ex(2012, 1, 2, 1)
|
| 944 |
+
assert xp == p.end_time
|
| 945 |
+
|
| 946 |
+
def test_end_time_business_friday(self):
|
| 947 |
+
# GH#34449
|
| 948 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 949 |
+
per = Period("1990-01-05", "B")
|
| 950 |
+
result = per.end_time
|
| 951 |
+
|
| 952 |
+
expected = Timestamp("1990-01-06") - Timedelta(nanoseconds=1)
|
| 953 |
+
assert result == expected
|
| 954 |
+
|
| 955 |
+
def test_anchor_week_end_time(self):
|
| 956 |
+
def _ex(*args):
|
| 957 |
+
return Timestamp(Timestamp(datetime(*args)).as_unit("ns")._value - 1)
|
| 958 |
+
|
| 959 |
+
p = Period("2013-1-1", "W-SAT")
|
| 960 |
+
xp = _ex(2013, 1, 6)
|
| 961 |
+
assert p.end_time == xp
|
| 962 |
+
|
| 963 |
+
def test_properties_annually(self):
|
| 964 |
+
# Test properties on Periods with annually frequency.
|
| 965 |
+
a_date = Period(freq="Y", year=2007)
|
| 966 |
+
assert a_date.year == 2007
|
| 967 |
+
|
| 968 |
+
def test_properties_quarterly(self):
|
| 969 |
+
# Test properties on Periods with daily frequency.
|
| 970 |
+
qedec_date = Period(freq="Q-DEC", year=2007, quarter=1)
|
| 971 |
+
qejan_date = Period(freq="Q-JAN", year=2007, quarter=1)
|
| 972 |
+
qejun_date = Period(freq="Q-JUN", year=2007, quarter=1)
|
| 973 |
+
#
|
| 974 |
+
for x in range(3):
|
| 975 |
+
for qd in (qedec_date, qejan_date, qejun_date):
|
| 976 |
+
assert (qd + x).qyear == 2007
|
| 977 |
+
assert (qd + x).quarter == x + 1
|
| 978 |
+
|
| 979 |
+
def test_properties_monthly(self):
|
| 980 |
+
# Test properties on Periods with daily frequency.
|
| 981 |
+
m_date = Period(freq="M", year=2007, month=1)
|
| 982 |
+
for x in range(11):
|
| 983 |
+
m_ival_x = m_date + x
|
| 984 |
+
assert m_ival_x.year == 2007
|
| 985 |
+
if 1 <= x + 1 <= 3:
|
| 986 |
+
assert m_ival_x.quarter == 1
|
| 987 |
+
elif 4 <= x + 1 <= 6:
|
| 988 |
+
assert m_ival_x.quarter == 2
|
| 989 |
+
elif 7 <= x + 1 <= 9:
|
| 990 |
+
assert m_ival_x.quarter == 3
|
| 991 |
+
elif 10 <= x + 1 <= 12:
|
| 992 |
+
assert m_ival_x.quarter == 4
|
| 993 |
+
assert m_ival_x.month == x + 1
|
| 994 |
+
|
| 995 |
+
def test_properties_weekly(self):
|
| 996 |
+
# Test properties on Periods with daily frequency.
|
| 997 |
+
w_date = Period(freq="W", year=2007, month=1, day=7)
|
| 998 |
+
#
|
| 999 |
+
assert w_date.year == 2007
|
| 1000 |
+
assert w_date.quarter == 1
|
| 1001 |
+
assert w_date.month == 1
|
| 1002 |
+
assert w_date.week == 1
|
| 1003 |
+
assert (w_date - 1).week == 52
|
| 1004 |
+
assert w_date.days_in_month == 31
|
| 1005 |
+
assert Period(freq="W", year=2012, month=2, day=1).days_in_month == 29
|
| 1006 |
+
|
| 1007 |
+
def test_properties_weekly_legacy(self):
|
| 1008 |
+
# Test properties on Periods with daily frequency.
|
| 1009 |
+
w_date = Period(freq="W", year=2007, month=1, day=7)
|
| 1010 |
+
assert w_date.year == 2007
|
| 1011 |
+
assert w_date.quarter == 1
|
| 1012 |
+
assert w_date.month == 1
|
| 1013 |
+
assert w_date.week == 1
|
| 1014 |
+
assert (w_date - 1).week == 52
|
| 1015 |
+
assert w_date.days_in_month == 31
|
| 1016 |
+
|
| 1017 |
+
exp = Period(freq="W", year=2012, month=2, day=1)
|
| 1018 |
+
assert exp.days_in_month == 29
|
| 1019 |
+
|
| 1020 |
+
msg = INVALID_FREQ_ERR_MSG
|
| 1021 |
+
with pytest.raises(ValueError, match=msg):
|
| 1022 |
+
Period(freq="WK", year=2007, month=1, day=7)
|
| 1023 |
+
|
| 1024 |
+
def test_properties_daily(self):
|
| 1025 |
+
# Test properties on Periods with daily frequency.
|
| 1026 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 1027 |
+
b_date = Period(freq="B", year=2007, month=1, day=1)
|
| 1028 |
+
#
|
| 1029 |
+
assert b_date.year == 2007
|
| 1030 |
+
assert b_date.quarter == 1
|
| 1031 |
+
assert b_date.month == 1
|
| 1032 |
+
assert b_date.day == 1
|
| 1033 |
+
assert b_date.weekday == 0
|
| 1034 |
+
assert b_date.dayofyear == 1
|
| 1035 |
+
assert b_date.days_in_month == 31
|
| 1036 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 1037 |
+
assert Period(freq="B", year=2012, month=2, day=1).days_in_month == 29
|
| 1038 |
+
|
| 1039 |
+
d_date = Period(freq="D", year=2007, month=1, day=1)
|
| 1040 |
+
|
| 1041 |
+
assert d_date.year == 2007
|
| 1042 |
+
assert d_date.quarter == 1
|
| 1043 |
+
assert d_date.month == 1
|
| 1044 |
+
assert d_date.day == 1
|
| 1045 |
+
assert d_date.weekday == 0
|
| 1046 |
+
assert d_date.dayofyear == 1
|
| 1047 |
+
assert d_date.days_in_month == 31
|
| 1048 |
+
assert Period(freq="D", year=2012, month=2, day=1).days_in_month == 29
|
| 1049 |
+
|
| 1050 |
+
def test_properties_hourly(self):
|
| 1051 |
+
# Test properties on Periods with hourly frequency.
|
| 1052 |
+
h_date1 = Period(freq="h", year=2007, month=1, day=1, hour=0)
|
| 1053 |
+
h_date2 = Period(freq="2h", year=2007, month=1, day=1, hour=0)
|
| 1054 |
+
|
| 1055 |
+
for h_date in [h_date1, h_date2]:
|
| 1056 |
+
assert h_date.year == 2007
|
| 1057 |
+
assert h_date.quarter == 1
|
| 1058 |
+
assert h_date.month == 1
|
| 1059 |
+
assert h_date.day == 1
|
| 1060 |
+
assert h_date.weekday == 0
|
| 1061 |
+
assert h_date.dayofyear == 1
|
| 1062 |
+
assert h_date.hour == 0
|
| 1063 |
+
assert h_date.days_in_month == 31
|
| 1064 |
+
assert (
|
| 1065 |
+
Period(freq="h", year=2012, month=2, day=1, hour=0).days_in_month == 29
|
| 1066 |
+
)
|
| 1067 |
+
|
| 1068 |
+
def test_properties_minutely(self):
|
| 1069 |
+
# Test properties on Periods with minutely frequency.
|
| 1070 |
+
t_date = Period(freq="Min", year=2007, month=1, day=1, hour=0, minute=0)
|
| 1071 |
+
#
|
| 1072 |
+
assert t_date.quarter == 1
|
| 1073 |
+
assert t_date.month == 1
|
| 1074 |
+
assert t_date.day == 1
|
| 1075 |
+
assert t_date.weekday == 0
|
| 1076 |
+
assert t_date.dayofyear == 1
|
| 1077 |
+
assert t_date.hour == 0
|
| 1078 |
+
assert t_date.minute == 0
|
| 1079 |
+
assert t_date.days_in_month == 31
|
| 1080 |
+
assert (
|
| 1081 |
+
Period(freq="D", year=2012, month=2, day=1, hour=0, minute=0).days_in_month
|
| 1082 |
+
== 29
|
| 1083 |
+
)
|
| 1084 |
+
|
| 1085 |
+
def test_properties_secondly(self):
|
| 1086 |
+
# Test properties on Periods with secondly frequency.
|
| 1087 |
+
s_date = Period(
|
| 1088 |
+
freq="Min", year=2007, month=1, day=1, hour=0, minute=0, second=0
|
| 1089 |
+
)
|
| 1090 |
+
#
|
| 1091 |
+
assert s_date.year == 2007
|
| 1092 |
+
assert s_date.quarter == 1
|
| 1093 |
+
assert s_date.month == 1
|
| 1094 |
+
assert s_date.day == 1
|
| 1095 |
+
assert s_date.weekday == 0
|
| 1096 |
+
assert s_date.dayofyear == 1
|
| 1097 |
+
assert s_date.hour == 0
|
| 1098 |
+
assert s_date.minute == 0
|
| 1099 |
+
assert s_date.second == 0
|
| 1100 |
+
assert s_date.days_in_month == 31
|
| 1101 |
+
assert (
|
| 1102 |
+
Period(
|
| 1103 |
+
freq="Min", year=2012, month=2, day=1, hour=0, minute=0, second=0
|
| 1104 |
+
).days_in_month
|
| 1105 |
+
== 29
|
| 1106 |
+
)
|
| 1107 |
+
|
| 1108 |
+
|
| 1109 |
+
class TestPeriodComparisons:
|
| 1110 |
+
def test_sort_periods(self):
|
| 1111 |
+
jan = Period("2000-01", "M")
|
| 1112 |
+
feb = Period("2000-02", "M")
|
| 1113 |
+
mar = Period("2000-03", "M")
|
| 1114 |
+
periods = [mar, jan, feb]
|
| 1115 |
+
correctPeriods = [jan, feb, mar]
|
| 1116 |
+
assert sorted(periods) == correctPeriods
|
| 1117 |
+
|
| 1118 |
+
|
| 1119 |
+
def test_period_immutable():
|
| 1120 |
+
# see gh-17116
|
| 1121 |
+
msg = "not writable"
|
| 1122 |
+
|
| 1123 |
+
per = Period("2014Q1")
|
| 1124 |
+
with pytest.raises(AttributeError, match=msg):
|
| 1125 |
+
per.ordinal = 14
|
| 1126 |
+
|
| 1127 |
+
freq = per.freq
|
| 1128 |
+
with pytest.raises(AttributeError, match=msg):
|
| 1129 |
+
per.freq = 2 * freq
|
| 1130 |
+
|
| 1131 |
+
|
| 1132 |
+
def test_small_year_parsing():
|
| 1133 |
+
per1 = Period("0001-01-07", "D")
|
| 1134 |
+
assert per1.year == 1
|
| 1135 |
+
assert per1.day == 7
|
| 1136 |
+
|
| 1137 |
+
|
| 1138 |
+
def test_negone_ordinals():
|
| 1139 |
+
freqs = ["Y", "M", "Q", "D", "h", "min", "s"]
|
| 1140 |
+
|
| 1141 |
+
period = Period(ordinal=-1, freq="D")
|
| 1142 |
+
for freq in freqs:
|
| 1143 |
+
repr(period.asfreq(freq))
|
| 1144 |
+
|
| 1145 |
+
for freq in freqs:
|
| 1146 |
+
period = Period(ordinal=-1, freq=freq)
|
| 1147 |
+
repr(period)
|
| 1148 |
+
assert period.year == 1969
|
| 1149 |
+
|
| 1150 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 1151 |
+
period = Period(ordinal=-1, freq="B")
|
| 1152 |
+
repr(period)
|
| 1153 |
+
period = Period(ordinal=-1, freq="W")
|
| 1154 |
+
repr(period)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (182 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_arithmetic.cpython-310.pyc
ADDED
|
Binary file (36.7 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_constructors.cpython-310.pyc
ADDED
|
Binary file (17.4 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_formats.cpython-310.pyc
ADDED
|
Binary file (3.48 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/test_timedelta.cpython-310.pyc
ADDED
|
Binary file (20.2 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (190 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__pycache__/test_as_unit.cpython-310.pyc
ADDED
|
Binary file (2.56 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/__pycache__/test_round.cpython-310.pyc
ADDED
|
Binary file (4.92 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/test_as_unit.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from pandas._libs.tslibs.dtypes import NpyDatetimeUnit
|
| 4 |
+
from pandas.errors import OutOfBoundsTimedelta
|
| 5 |
+
|
| 6 |
+
from pandas import Timedelta
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TestAsUnit:
|
| 10 |
+
def test_as_unit(self):
|
| 11 |
+
td = Timedelta(days=1)
|
| 12 |
+
|
| 13 |
+
assert td.as_unit("ns") is td
|
| 14 |
+
|
| 15 |
+
res = td.as_unit("us")
|
| 16 |
+
assert res._value == td._value // 1000
|
| 17 |
+
assert res._creso == NpyDatetimeUnit.NPY_FR_us.value
|
| 18 |
+
|
| 19 |
+
rt = res.as_unit("ns")
|
| 20 |
+
assert rt._value == td._value
|
| 21 |
+
assert rt._creso == td._creso
|
| 22 |
+
|
| 23 |
+
res = td.as_unit("ms")
|
| 24 |
+
assert res._value == td._value // 1_000_000
|
| 25 |
+
assert res._creso == NpyDatetimeUnit.NPY_FR_ms.value
|
| 26 |
+
|
| 27 |
+
rt = res.as_unit("ns")
|
| 28 |
+
assert rt._value == td._value
|
| 29 |
+
assert rt._creso == td._creso
|
| 30 |
+
|
| 31 |
+
res = td.as_unit("s")
|
| 32 |
+
assert res._value == td._value // 1_000_000_000
|
| 33 |
+
assert res._creso == NpyDatetimeUnit.NPY_FR_s.value
|
| 34 |
+
|
| 35 |
+
rt = res.as_unit("ns")
|
| 36 |
+
assert rt._value == td._value
|
| 37 |
+
assert rt._creso == td._creso
|
| 38 |
+
|
| 39 |
+
def test_as_unit_overflows(self):
|
| 40 |
+
# microsecond that would be just out of bounds for nano
|
| 41 |
+
us = 9223372800000000
|
| 42 |
+
td = Timedelta._from_value_and_reso(us, NpyDatetimeUnit.NPY_FR_us.value)
|
| 43 |
+
|
| 44 |
+
msg = "Cannot cast 106752 days 00:00:00 to unit='ns' without overflow"
|
| 45 |
+
with pytest.raises(OutOfBoundsTimedelta, match=msg):
|
| 46 |
+
td.as_unit("ns")
|
| 47 |
+
|
| 48 |
+
res = td.as_unit("ms")
|
| 49 |
+
assert res._value == us // 1000
|
| 50 |
+
assert res._creso == NpyDatetimeUnit.NPY_FR_ms.value
|
| 51 |
+
|
| 52 |
+
def test_as_unit_rounding(self):
|
| 53 |
+
td = Timedelta(microseconds=1500)
|
| 54 |
+
res = td.as_unit("ms")
|
| 55 |
+
|
| 56 |
+
expected = Timedelta(milliseconds=1)
|
| 57 |
+
assert res == expected
|
| 58 |
+
|
| 59 |
+
assert res._creso == NpyDatetimeUnit.NPY_FR_ms.value
|
| 60 |
+
assert res._value == 1
|
| 61 |
+
|
| 62 |
+
with pytest.raises(ValueError, match="Cannot losslessly convert units"):
|
| 63 |
+
td.as_unit("ms", round_ok=False)
|
| 64 |
+
|
| 65 |
+
def test_as_unit_non_nano(self):
|
| 66 |
+
# case where we are going neither to nor from nano
|
| 67 |
+
td = Timedelta(days=1).as_unit("ms")
|
| 68 |
+
assert td.days == 1
|
| 69 |
+
assert td._value == 86_400_000
|
| 70 |
+
assert td.components.days == 1
|
| 71 |
+
assert td._d == 1
|
| 72 |
+
assert td.total_seconds() == 86400
|
| 73 |
+
|
| 74 |
+
res = td.as_unit("us")
|
| 75 |
+
assert res._value == 86_400_000_000
|
| 76 |
+
assert res.components.days == 1
|
| 77 |
+
assert res.components.hours == 0
|
| 78 |
+
assert res._d == 1
|
| 79 |
+
assert res._h == 0
|
| 80 |
+
assert res.total_seconds() == 86400
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/methods/test_round.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from hypothesis import (
|
| 2 |
+
given,
|
| 3 |
+
strategies as st,
|
| 4 |
+
)
|
| 5 |
+
import numpy as np
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
from pandas._libs import lib
|
| 9 |
+
from pandas._libs.tslibs import iNaT
|
| 10 |
+
from pandas.errors import OutOfBoundsTimedelta
|
| 11 |
+
|
| 12 |
+
from pandas import Timedelta
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestTimedeltaRound:
|
| 16 |
+
@pytest.mark.parametrize(
|
| 17 |
+
"freq,s1,s2",
|
| 18 |
+
[
|
| 19 |
+
# This first case has s1, s2 being the same as t1,t2 below
|
| 20 |
+
(
|
| 21 |
+
"ns",
|
| 22 |
+
Timedelta("1 days 02:34:56.789123456"),
|
| 23 |
+
Timedelta("-1 days 02:34:56.789123456"),
|
| 24 |
+
),
|
| 25 |
+
(
|
| 26 |
+
"us",
|
| 27 |
+
Timedelta("1 days 02:34:56.789123000"),
|
| 28 |
+
Timedelta("-1 days 02:34:56.789123000"),
|
| 29 |
+
),
|
| 30 |
+
(
|
| 31 |
+
"ms",
|
| 32 |
+
Timedelta("1 days 02:34:56.789000000"),
|
| 33 |
+
Timedelta("-1 days 02:34:56.789000000"),
|
| 34 |
+
),
|
| 35 |
+
("s", Timedelta("1 days 02:34:57"), Timedelta("-1 days 02:34:57")),
|
| 36 |
+
("2s", Timedelta("1 days 02:34:56"), Timedelta("-1 days 02:34:56")),
|
| 37 |
+
("5s", Timedelta("1 days 02:34:55"), Timedelta("-1 days 02:34:55")),
|
| 38 |
+
("min", Timedelta("1 days 02:35:00"), Timedelta("-1 days 02:35:00")),
|
| 39 |
+
("12min", Timedelta("1 days 02:36:00"), Timedelta("-1 days 02:36:00")),
|
| 40 |
+
("h", Timedelta("1 days 03:00:00"), Timedelta("-1 days 03:00:00")),
|
| 41 |
+
("d", Timedelta("1 days"), Timedelta("-1 days")),
|
| 42 |
+
],
|
| 43 |
+
)
|
| 44 |
+
def test_round(self, freq, s1, s2):
|
| 45 |
+
t1 = Timedelta("1 days 02:34:56.789123456")
|
| 46 |
+
t2 = Timedelta("-1 days 02:34:56.789123456")
|
| 47 |
+
|
| 48 |
+
r1 = t1.round(freq)
|
| 49 |
+
assert r1 == s1
|
| 50 |
+
r2 = t2.round(freq)
|
| 51 |
+
assert r2 == s2
|
| 52 |
+
|
| 53 |
+
def test_round_invalid(self):
|
| 54 |
+
t1 = Timedelta("1 days 02:34:56.789123456")
|
| 55 |
+
|
| 56 |
+
for freq, msg in [
|
| 57 |
+
("YE", "<YearEnd: month=12> is a non-fixed frequency"),
|
| 58 |
+
("ME", "<MonthEnd> is a non-fixed frequency"),
|
| 59 |
+
("foobar", "Invalid frequency: foobar"),
|
| 60 |
+
]:
|
| 61 |
+
with pytest.raises(ValueError, match=msg):
|
| 62 |
+
t1.round(freq)
|
| 63 |
+
|
| 64 |
+
@pytest.mark.skip_ubsan
|
| 65 |
+
def test_round_implementation_bounds(self):
|
| 66 |
+
# See also: analogous test for Timestamp
|
| 67 |
+
# GH#38964
|
| 68 |
+
result = Timedelta.min.ceil("s")
|
| 69 |
+
expected = Timedelta.min + Timedelta(seconds=1) - Timedelta(145224193)
|
| 70 |
+
assert result == expected
|
| 71 |
+
|
| 72 |
+
result = Timedelta.max.floor("s")
|
| 73 |
+
expected = Timedelta.max - Timedelta(854775807)
|
| 74 |
+
assert result == expected
|
| 75 |
+
|
| 76 |
+
msg = (
|
| 77 |
+
r"Cannot round -106752 days \+00:12:43.145224193 to freq=s without overflow"
|
| 78 |
+
)
|
| 79 |
+
with pytest.raises(OutOfBoundsTimedelta, match=msg):
|
| 80 |
+
Timedelta.min.floor("s")
|
| 81 |
+
with pytest.raises(OutOfBoundsTimedelta, match=msg):
|
| 82 |
+
Timedelta.min.round("s")
|
| 83 |
+
|
| 84 |
+
msg = "Cannot round 106751 days 23:47:16.854775807 to freq=s without overflow"
|
| 85 |
+
with pytest.raises(OutOfBoundsTimedelta, match=msg):
|
| 86 |
+
Timedelta.max.ceil("s")
|
| 87 |
+
with pytest.raises(OutOfBoundsTimedelta, match=msg):
|
| 88 |
+
Timedelta.max.round("s")
|
| 89 |
+
|
| 90 |
+
@pytest.mark.skip_ubsan
|
| 91 |
+
@given(val=st.integers(min_value=iNaT + 1, max_value=lib.i8max))
|
| 92 |
+
@pytest.mark.parametrize(
|
| 93 |
+
"method", [Timedelta.round, Timedelta.floor, Timedelta.ceil]
|
| 94 |
+
)
|
| 95 |
+
def test_round_sanity(self, val, method):
|
| 96 |
+
cls = Timedelta
|
| 97 |
+
err_cls = OutOfBoundsTimedelta
|
| 98 |
+
|
| 99 |
+
val = np.int64(val)
|
| 100 |
+
td = cls(val)
|
| 101 |
+
|
| 102 |
+
def checker(ts, nanos, unit):
|
| 103 |
+
# First check that we do raise in cases where we should
|
| 104 |
+
if nanos == 1:
|
| 105 |
+
pass
|
| 106 |
+
else:
|
| 107 |
+
div, mod = divmod(ts._value, nanos)
|
| 108 |
+
diff = int(nanos - mod)
|
| 109 |
+
lb = ts._value - mod
|
| 110 |
+
assert lb <= ts._value # i.e. no overflows with python ints
|
| 111 |
+
ub = ts._value + diff
|
| 112 |
+
assert ub > ts._value # i.e. no overflows with python ints
|
| 113 |
+
|
| 114 |
+
msg = "without overflow"
|
| 115 |
+
if mod == 0:
|
| 116 |
+
# We should never be raising in this
|
| 117 |
+
pass
|
| 118 |
+
elif method is cls.ceil:
|
| 119 |
+
if ub > cls.max._value:
|
| 120 |
+
with pytest.raises(err_cls, match=msg):
|
| 121 |
+
method(ts, unit)
|
| 122 |
+
return
|
| 123 |
+
elif method is cls.floor:
|
| 124 |
+
if lb < cls.min._value:
|
| 125 |
+
with pytest.raises(err_cls, match=msg):
|
| 126 |
+
method(ts, unit)
|
| 127 |
+
return
|
| 128 |
+
elif mod >= diff:
|
| 129 |
+
if ub > cls.max._value:
|
| 130 |
+
with pytest.raises(err_cls, match=msg):
|
| 131 |
+
method(ts, unit)
|
| 132 |
+
return
|
| 133 |
+
elif lb < cls.min._value:
|
| 134 |
+
with pytest.raises(err_cls, match=msg):
|
| 135 |
+
method(ts, unit)
|
| 136 |
+
return
|
| 137 |
+
|
| 138 |
+
res = method(ts, unit)
|
| 139 |
+
|
| 140 |
+
td = res - ts
|
| 141 |
+
diff = abs(td._value)
|
| 142 |
+
assert diff < nanos
|
| 143 |
+
assert res._value % nanos == 0
|
| 144 |
+
|
| 145 |
+
if method is cls.round:
|
| 146 |
+
assert diff <= nanos / 2
|
| 147 |
+
elif method is cls.floor:
|
| 148 |
+
assert res <= ts
|
| 149 |
+
elif method is cls.ceil:
|
| 150 |
+
assert res >= ts
|
| 151 |
+
|
| 152 |
+
nanos = 1
|
| 153 |
+
checker(td, nanos, "ns")
|
| 154 |
+
|
| 155 |
+
nanos = 1000
|
| 156 |
+
checker(td, nanos, "us")
|
| 157 |
+
|
| 158 |
+
nanos = 1_000_000
|
| 159 |
+
checker(td, nanos, "ms")
|
| 160 |
+
|
| 161 |
+
nanos = 1_000_000_000
|
| 162 |
+
checker(td, nanos, "s")
|
| 163 |
+
|
| 164 |
+
nanos = 60 * 1_000_000_000
|
| 165 |
+
checker(td, nanos, "min")
|
| 166 |
+
|
| 167 |
+
nanos = 60 * 60 * 1_000_000_000
|
| 168 |
+
checker(td, nanos, "h")
|
| 169 |
+
|
| 170 |
+
nanos = 24 * 60 * 60 * 1_000_000_000
|
| 171 |
+
checker(td, nanos, "D")
|
| 172 |
+
|
| 173 |
+
@pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"])
|
| 174 |
+
def test_round_non_nano(self, unit):
|
| 175 |
+
td = Timedelta("1 days 02:34:57").as_unit(unit)
|
| 176 |
+
|
| 177 |
+
res = td.round("min")
|
| 178 |
+
assert res == Timedelta("1 days 02:35:00")
|
| 179 |
+
assert res._creso == td._creso
|
| 180 |
+
|
| 181 |
+
res = td.floor("min")
|
| 182 |
+
assert res == Timedelta("1 days 02:34:00")
|
| 183 |
+
assert res._creso == td._creso
|
| 184 |
+
|
| 185 |
+
res = td.ceil("min")
|
| 186 |
+
assert res == Timedelta("1 days 02:35:00")
|
| 187 |
+
assert res._creso == td._creso
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/test_arithmetic.py
ADDED
|
@@ -0,0 +1,1183 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for scalar Timedelta arithmetic ops
|
| 3 |
+
"""
|
| 4 |
+
from datetime import (
|
| 5 |
+
datetime,
|
| 6 |
+
timedelta,
|
| 7 |
+
)
|
| 8 |
+
import operator
|
| 9 |
+
|
| 10 |
+
import numpy as np
|
| 11 |
+
import pytest
|
| 12 |
+
|
| 13 |
+
from pandas.errors import OutOfBoundsTimedelta
|
| 14 |
+
|
| 15 |
+
import pandas as pd
|
| 16 |
+
from pandas import (
|
| 17 |
+
NaT,
|
| 18 |
+
Timedelta,
|
| 19 |
+
Timestamp,
|
| 20 |
+
offsets,
|
| 21 |
+
)
|
| 22 |
+
import pandas._testing as tm
|
| 23 |
+
from pandas.core import ops
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class TestTimedeltaAdditionSubtraction:
|
| 27 |
+
"""
|
| 28 |
+
Tests for Timedelta methods:
|
| 29 |
+
|
| 30 |
+
__add__, __radd__,
|
| 31 |
+
__sub__, __rsub__
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
@pytest.mark.parametrize(
|
| 35 |
+
"ten_seconds",
|
| 36 |
+
[
|
| 37 |
+
Timedelta(10, unit="s"),
|
| 38 |
+
timedelta(seconds=10),
|
| 39 |
+
np.timedelta64(10, "s"),
|
| 40 |
+
np.timedelta64(10000000000, "ns"),
|
| 41 |
+
offsets.Second(10),
|
| 42 |
+
],
|
| 43 |
+
)
|
| 44 |
+
def test_td_add_sub_ten_seconds(self, ten_seconds):
|
| 45 |
+
# GH#6808
|
| 46 |
+
base = Timestamp("20130101 09:01:12.123456")
|
| 47 |
+
expected_add = Timestamp("20130101 09:01:22.123456")
|
| 48 |
+
expected_sub = Timestamp("20130101 09:01:02.123456")
|
| 49 |
+
|
| 50 |
+
result = base + ten_seconds
|
| 51 |
+
assert result == expected_add
|
| 52 |
+
|
| 53 |
+
result = base - ten_seconds
|
| 54 |
+
assert result == expected_sub
|
| 55 |
+
|
| 56 |
+
@pytest.mark.parametrize(
|
| 57 |
+
"one_day_ten_secs",
|
| 58 |
+
[
|
| 59 |
+
Timedelta("1 day, 00:00:10"),
|
| 60 |
+
Timedelta("1 days, 00:00:10"),
|
| 61 |
+
timedelta(days=1, seconds=10),
|
| 62 |
+
np.timedelta64(1, "D") + np.timedelta64(10, "s"),
|
| 63 |
+
offsets.Day() + offsets.Second(10),
|
| 64 |
+
],
|
| 65 |
+
)
|
| 66 |
+
def test_td_add_sub_one_day_ten_seconds(self, one_day_ten_secs):
|
| 67 |
+
# GH#6808
|
| 68 |
+
base = Timestamp("20130102 09:01:12.123456")
|
| 69 |
+
expected_add = Timestamp("20130103 09:01:22.123456")
|
| 70 |
+
expected_sub = Timestamp("20130101 09:01:02.123456")
|
| 71 |
+
|
| 72 |
+
result = base + one_day_ten_secs
|
| 73 |
+
assert result == expected_add
|
| 74 |
+
|
| 75 |
+
result = base - one_day_ten_secs
|
| 76 |
+
assert result == expected_sub
|
| 77 |
+
|
| 78 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 79 |
+
def test_td_add_datetimelike_scalar(self, op):
|
| 80 |
+
# GH#19738
|
| 81 |
+
td = Timedelta(10, unit="d")
|
| 82 |
+
|
| 83 |
+
result = op(td, datetime(2016, 1, 1))
|
| 84 |
+
if op is operator.add:
|
| 85 |
+
# datetime + Timedelta does _not_ call Timedelta.__radd__,
|
| 86 |
+
# so we get a datetime back instead of a Timestamp
|
| 87 |
+
assert isinstance(result, Timestamp)
|
| 88 |
+
assert result == Timestamp(2016, 1, 11)
|
| 89 |
+
|
| 90 |
+
result = op(td, Timestamp("2018-01-12 18:09"))
|
| 91 |
+
assert isinstance(result, Timestamp)
|
| 92 |
+
assert result == Timestamp("2018-01-22 18:09")
|
| 93 |
+
|
| 94 |
+
result = op(td, np.datetime64("2018-01-12"))
|
| 95 |
+
assert isinstance(result, Timestamp)
|
| 96 |
+
assert result == Timestamp("2018-01-22")
|
| 97 |
+
|
| 98 |
+
result = op(td, NaT)
|
| 99 |
+
assert result is NaT
|
| 100 |
+
|
| 101 |
+
def test_td_add_timestamp_overflow(self):
|
| 102 |
+
ts = Timestamp("1700-01-01").as_unit("ns")
|
| 103 |
+
msg = "Cannot cast 259987 from D to 'ns' without overflow."
|
| 104 |
+
with pytest.raises(OutOfBoundsTimedelta, match=msg):
|
| 105 |
+
ts + Timedelta(13 * 19999, unit="D")
|
| 106 |
+
|
| 107 |
+
msg = "Cannot cast 259987 days 00:00:00 to unit='ns' without overflow"
|
| 108 |
+
with pytest.raises(OutOfBoundsTimedelta, match=msg):
|
| 109 |
+
ts + timedelta(days=13 * 19999)
|
| 110 |
+
|
| 111 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 112 |
+
def test_td_add_td(self, op):
|
| 113 |
+
td = Timedelta(10, unit="d")
|
| 114 |
+
|
| 115 |
+
result = op(td, Timedelta(days=10))
|
| 116 |
+
assert isinstance(result, Timedelta)
|
| 117 |
+
assert result == Timedelta(days=20)
|
| 118 |
+
|
| 119 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 120 |
+
def test_td_add_pytimedelta(self, op):
|
| 121 |
+
td = Timedelta(10, unit="d")
|
| 122 |
+
result = op(td, timedelta(days=9))
|
| 123 |
+
assert isinstance(result, Timedelta)
|
| 124 |
+
assert result == Timedelta(days=19)
|
| 125 |
+
|
| 126 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 127 |
+
def test_td_add_timedelta64(self, op):
|
| 128 |
+
td = Timedelta(10, unit="d")
|
| 129 |
+
result = op(td, np.timedelta64(-4, "D"))
|
| 130 |
+
assert isinstance(result, Timedelta)
|
| 131 |
+
assert result == Timedelta(days=6)
|
| 132 |
+
|
| 133 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 134 |
+
def test_td_add_offset(self, op):
|
| 135 |
+
td = Timedelta(10, unit="d")
|
| 136 |
+
|
| 137 |
+
result = op(td, offsets.Hour(6))
|
| 138 |
+
assert isinstance(result, Timedelta)
|
| 139 |
+
assert result == Timedelta(days=10, hours=6)
|
| 140 |
+
|
| 141 |
+
def test_td_sub_td(self):
|
| 142 |
+
td = Timedelta(10, unit="d")
|
| 143 |
+
expected = Timedelta(0, unit="ns")
|
| 144 |
+
result = td - td
|
| 145 |
+
assert isinstance(result, Timedelta)
|
| 146 |
+
assert result == expected
|
| 147 |
+
|
| 148 |
+
def test_td_sub_pytimedelta(self):
|
| 149 |
+
td = Timedelta(10, unit="d")
|
| 150 |
+
expected = Timedelta(0, unit="ns")
|
| 151 |
+
|
| 152 |
+
result = td - td.to_pytimedelta()
|
| 153 |
+
assert isinstance(result, Timedelta)
|
| 154 |
+
assert result == expected
|
| 155 |
+
|
| 156 |
+
result = td.to_pytimedelta() - td
|
| 157 |
+
assert isinstance(result, Timedelta)
|
| 158 |
+
assert result == expected
|
| 159 |
+
|
| 160 |
+
def test_td_sub_timedelta64(self):
|
| 161 |
+
td = Timedelta(10, unit="d")
|
| 162 |
+
expected = Timedelta(0, unit="ns")
|
| 163 |
+
|
| 164 |
+
result = td - td.to_timedelta64()
|
| 165 |
+
assert isinstance(result, Timedelta)
|
| 166 |
+
assert result == expected
|
| 167 |
+
|
| 168 |
+
result = td.to_timedelta64() - td
|
| 169 |
+
assert isinstance(result, Timedelta)
|
| 170 |
+
assert result == expected
|
| 171 |
+
|
| 172 |
+
def test_td_sub_nat(self):
|
| 173 |
+
# In this context pd.NaT is treated as timedelta-like
|
| 174 |
+
td = Timedelta(10, unit="d")
|
| 175 |
+
result = td - NaT
|
| 176 |
+
assert result is NaT
|
| 177 |
+
|
| 178 |
+
def test_td_sub_td64_nat(self):
|
| 179 |
+
td = Timedelta(10, unit="d")
|
| 180 |
+
td_nat = np.timedelta64("NaT")
|
| 181 |
+
|
| 182 |
+
result = td - td_nat
|
| 183 |
+
assert result is NaT
|
| 184 |
+
|
| 185 |
+
result = td_nat - td
|
| 186 |
+
assert result is NaT
|
| 187 |
+
|
| 188 |
+
def test_td_sub_offset(self):
|
| 189 |
+
td = Timedelta(10, unit="d")
|
| 190 |
+
result = td - offsets.Hour(1)
|
| 191 |
+
assert isinstance(result, Timedelta)
|
| 192 |
+
assert result == Timedelta(239, unit="h")
|
| 193 |
+
|
| 194 |
+
def test_td_add_sub_numeric_raises(self):
|
| 195 |
+
td = Timedelta(10, unit="d")
|
| 196 |
+
msg = "unsupported operand type"
|
| 197 |
+
for other in [2, 2.0, np.int64(2), np.float64(2)]:
|
| 198 |
+
with pytest.raises(TypeError, match=msg):
|
| 199 |
+
td + other
|
| 200 |
+
with pytest.raises(TypeError, match=msg):
|
| 201 |
+
other + td
|
| 202 |
+
with pytest.raises(TypeError, match=msg):
|
| 203 |
+
td - other
|
| 204 |
+
with pytest.raises(TypeError, match=msg):
|
| 205 |
+
other - td
|
| 206 |
+
|
| 207 |
+
def test_td_add_sub_int_ndarray(self):
|
| 208 |
+
td = Timedelta("1 day")
|
| 209 |
+
other = np.array([1])
|
| 210 |
+
|
| 211 |
+
msg = r"unsupported operand type\(s\) for \+: 'Timedelta' and 'int'"
|
| 212 |
+
with pytest.raises(TypeError, match=msg):
|
| 213 |
+
td + np.array([1])
|
| 214 |
+
|
| 215 |
+
msg = "|".join(
|
| 216 |
+
[
|
| 217 |
+
(
|
| 218 |
+
r"unsupported operand type\(s\) for \+: 'numpy.ndarray' "
|
| 219 |
+
"and 'Timedelta'"
|
| 220 |
+
),
|
| 221 |
+
# This message goes on to say "Please do not rely on this error;
|
| 222 |
+
# it may not be given on all Python implementations"
|
| 223 |
+
"Concatenation operation is not implemented for NumPy arrays",
|
| 224 |
+
]
|
| 225 |
+
)
|
| 226 |
+
with pytest.raises(TypeError, match=msg):
|
| 227 |
+
other + td
|
| 228 |
+
msg = r"unsupported operand type\(s\) for -: 'Timedelta' and 'int'"
|
| 229 |
+
with pytest.raises(TypeError, match=msg):
|
| 230 |
+
td - other
|
| 231 |
+
msg = r"unsupported operand type\(s\) for -: 'numpy.ndarray' and 'Timedelta'"
|
| 232 |
+
with pytest.raises(TypeError, match=msg):
|
| 233 |
+
other - td
|
| 234 |
+
|
| 235 |
+
def test_td_rsub_nat(self):
|
| 236 |
+
td = Timedelta(10, unit="d")
|
| 237 |
+
result = NaT - td
|
| 238 |
+
assert result is NaT
|
| 239 |
+
|
| 240 |
+
result = np.datetime64("NaT") - td
|
| 241 |
+
assert result is NaT
|
| 242 |
+
|
| 243 |
+
def test_td_rsub_offset(self):
|
| 244 |
+
result = offsets.Hour(1) - Timedelta(10, unit="d")
|
| 245 |
+
assert isinstance(result, Timedelta)
|
| 246 |
+
assert result == Timedelta(-239, unit="h")
|
| 247 |
+
|
| 248 |
+
def test_td_sub_timedeltalike_object_dtype_array(self):
|
| 249 |
+
# GH#21980
|
| 250 |
+
arr = np.array([Timestamp("20130101 9:01"), Timestamp("20121230 9:02")])
|
| 251 |
+
exp = np.array([Timestamp("20121231 9:01"), Timestamp("20121229 9:02")])
|
| 252 |
+
res = arr - Timedelta("1D")
|
| 253 |
+
tm.assert_numpy_array_equal(res, exp)
|
| 254 |
+
|
| 255 |
+
def test_td_sub_mixed_most_timedeltalike_object_dtype_array(self):
|
| 256 |
+
# GH#21980
|
| 257 |
+
now = Timestamp("2021-11-09 09:54:00")
|
| 258 |
+
arr = np.array([now, Timedelta("1D"), np.timedelta64(2, "h")])
|
| 259 |
+
exp = np.array(
|
| 260 |
+
[
|
| 261 |
+
now - Timedelta("1D"),
|
| 262 |
+
Timedelta("0D"),
|
| 263 |
+
np.timedelta64(2, "h") - Timedelta("1D"),
|
| 264 |
+
]
|
| 265 |
+
)
|
| 266 |
+
res = arr - Timedelta("1D")
|
| 267 |
+
tm.assert_numpy_array_equal(res, exp)
|
| 268 |
+
|
| 269 |
+
def test_td_rsub_mixed_most_timedeltalike_object_dtype_array(self):
|
| 270 |
+
# GH#21980
|
| 271 |
+
now = Timestamp("2021-11-09 09:54:00")
|
| 272 |
+
arr = np.array([now, Timedelta("1D"), np.timedelta64(2, "h")])
|
| 273 |
+
msg = r"unsupported operand type\(s\) for \-: 'Timedelta' and 'Timestamp'"
|
| 274 |
+
with pytest.raises(TypeError, match=msg):
|
| 275 |
+
Timedelta("1D") - arr
|
| 276 |
+
|
| 277 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 278 |
+
def test_td_add_timedeltalike_object_dtype_array(self, op):
|
| 279 |
+
# GH#21980
|
| 280 |
+
arr = np.array([Timestamp("20130101 9:01"), Timestamp("20121230 9:02")])
|
| 281 |
+
exp = np.array([Timestamp("20130102 9:01"), Timestamp("20121231 9:02")])
|
| 282 |
+
res = op(arr, Timedelta("1D"))
|
| 283 |
+
tm.assert_numpy_array_equal(res, exp)
|
| 284 |
+
|
| 285 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 286 |
+
def test_td_add_mixed_timedeltalike_object_dtype_array(self, op):
|
| 287 |
+
# GH#21980
|
| 288 |
+
now = Timestamp("2021-11-09 09:54:00")
|
| 289 |
+
arr = np.array([now, Timedelta("1D")])
|
| 290 |
+
exp = np.array([now + Timedelta("1D"), Timedelta("2D")])
|
| 291 |
+
res = op(arr, Timedelta("1D"))
|
| 292 |
+
tm.assert_numpy_array_equal(res, exp)
|
| 293 |
+
|
| 294 |
+
def test_td_add_sub_td64_ndarray(self):
|
| 295 |
+
td = Timedelta("1 day")
|
| 296 |
+
|
| 297 |
+
other = np.array([td.to_timedelta64()])
|
| 298 |
+
expected = np.array([Timedelta("2 Days").to_timedelta64()])
|
| 299 |
+
|
| 300 |
+
result = td + other
|
| 301 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 302 |
+
result = other + td
|
| 303 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 304 |
+
|
| 305 |
+
result = td - other
|
| 306 |
+
tm.assert_numpy_array_equal(result, expected * 0)
|
| 307 |
+
result = other - td
|
| 308 |
+
tm.assert_numpy_array_equal(result, expected * 0)
|
| 309 |
+
|
| 310 |
+
def test_td_add_sub_dt64_ndarray(self):
|
| 311 |
+
td = Timedelta("1 day")
|
| 312 |
+
other = np.array(["2000-01-01"], dtype="M8[ns]")
|
| 313 |
+
|
| 314 |
+
expected = np.array(["2000-01-02"], dtype="M8[ns]")
|
| 315 |
+
tm.assert_numpy_array_equal(td + other, expected)
|
| 316 |
+
tm.assert_numpy_array_equal(other + td, expected)
|
| 317 |
+
|
| 318 |
+
expected = np.array(["1999-12-31"], dtype="M8[ns]")
|
| 319 |
+
tm.assert_numpy_array_equal(-td + other, expected)
|
| 320 |
+
tm.assert_numpy_array_equal(other - td, expected)
|
| 321 |
+
|
| 322 |
+
def test_td_add_sub_ndarray_0d(self):
|
| 323 |
+
td = Timedelta("1 day")
|
| 324 |
+
other = np.array(td.asm8)
|
| 325 |
+
|
| 326 |
+
result = td + other
|
| 327 |
+
assert isinstance(result, Timedelta)
|
| 328 |
+
assert result == 2 * td
|
| 329 |
+
|
| 330 |
+
result = other + td
|
| 331 |
+
assert isinstance(result, Timedelta)
|
| 332 |
+
assert result == 2 * td
|
| 333 |
+
|
| 334 |
+
result = other - td
|
| 335 |
+
assert isinstance(result, Timedelta)
|
| 336 |
+
assert result == 0 * td
|
| 337 |
+
|
| 338 |
+
result = td - other
|
| 339 |
+
assert isinstance(result, Timedelta)
|
| 340 |
+
assert result == 0 * td
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
class TestTimedeltaMultiplicationDivision:
|
| 344 |
+
"""
|
| 345 |
+
Tests for Timedelta methods:
|
| 346 |
+
|
| 347 |
+
__mul__, __rmul__,
|
| 348 |
+
__div__, __rdiv__,
|
| 349 |
+
__truediv__, __rtruediv__,
|
| 350 |
+
__floordiv__, __rfloordiv__,
|
| 351 |
+
__mod__, __rmod__,
|
| 352 |
+
__divmod__, __rdivmod__
|
| 353 |
+
"""
|
| 354 |
+
|
| 355 |
+
# ---------------------------------------------------------------
|
| 356 |
+
# Timedelta.__mul__, __rmul__
|
| 357 |
+
|
| 358 |
+
@pytest.mark.parametrize(
|
| 359 |
+
"td_nat", [NaT, np.timedelta64("NaT", "ns"), np.timedelta64("NaT")]
|
| 360 |
+
)
|
| 361 |
+
@pytest.mark.parametrize("op", [operator.mul, ops.rmul])
|
| 362 |
+
def test_td_mul_nat(self, op, td_nat):
|
| 363 |
+
# GH#19819
|
| 364 |
+
td = Timedelta(10, unit="d")
|
| 365 |
+
typs = "|".join(["numpy.timedelta64", "NaTType", "Timedelta"])
|
| 366 |
+
msg = "|".join(
|
| 367 |
+
[
|
| 368 |
+
rf"unsupported operand type\(s\) for \*: '{typs}' and '{typs}'",
|
| 369 |
+
r"ufunc '?multiply'? cannot use operands with types",
|
| 370 |
+
]
|
| 371 |
+
)
|
| 372 |
+
with pytest.raises(TypeError, match=msg):
|
| 373 |
+
op(td, td_nat)
|
| 374 |
+
|
| 375 |
+
@pytest.mark.parametrize("nan", [np.nan, np.float64("NaN"), float("nan")])
|
| 376 |
+
@pytest.mark.parametrize("op", [operator.mul, ops.rmul])
|
| 377 |
+
def test_td_mul_nan(self, op, nan):
|
| 378 |
+
# np.float64('NaN') has a 'dtype' attr, avoid treating as array
|
| 379 |
+
td = Timedelta(10, unit="d")
|
| 380 |
+
result = op(td, nan)
|
| 381 |
+
assert result is NaT
|
| 382 |
+
|
| 383 |
+
@pytest.mark.parametrize("op", [operator.mul, ops.rmul])
|
| 384 |
+
def test_td_mul_scalar(self, op):
|
| 385 |
+
# GH#19738
|
| 386 |
+
td = Timedelta(minutes=3)
|
| 387 |
+
|
| 388 |
+
result = op(td, 2)
|
| 389 |
+
assert result == Timedelta(minutes=6)
|
| 390 |
+
|
| 391 |
+
result = op(td, 1.5)
|
| 392 |
+
assert result == Timedelta(minutes=4, seconds=30)
|
| 393 |
+
|
| 394 |
+
assert op(td, np.nan) is NaT
|
| 395 |
+
|
| 396 |
+
assert op(-1, td)._value == -1 * td._value
|
| 397 |
+
assert op(-1.0, td)._value == -1.0 * td._value
|
| 398 |
+
|
| 399 |
+
msg = "unsupported operand type"
|
| 400 |
+
with pytest.raises(TypeError, match=msg):
|
| 401 |
+
# timedelta * datetime is gibberish
|
| 402 |
+
op(td, Timestamp(2016, 1, 2))
|
| 403 |
+
|
| 404 |
+
with pytest.raises(TypeError, match=msg):
|
| 405 |
+
# invalid multiply with another timedelta
|
| 406 |
+
op(td, td)
|
| 407 |
+
|
| 408 |
+
def test_td_mul_numeric_ndarray(self):
|
| 409 |
+
td = Timedelta("1 day")
|
| 410 |
+
other = np.array([2])
|
| 411 |
+
expected = np.array([Timedelta("2 Days").to_timedelta64()])
|
| 412 |
+
|
| 413 |
+
result = td * other
|
| 414 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 415 |
+
|
| 416 |
+
result = other * td
|
| 417 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 418 |
+
|
| 419 |
+
def test_td_mul_numeric_ndarray_0d(self):
|
| 420 |
+
td = Timedelta("1 day")
|
| 421 |
+
other = np.array(2, dtype=np.int64)
|
| 422 |
+
assert other.ndim == 0
|
| 423 |
+
expected = Timedelta("2 days")
|
| 424 |
+
|
| 425 |
+
res = td * other
|
| 426 |
+
assert type(res) is Timedelta
|
| 427 |
+
assert res == expected
|
| 428 |
+
|
| 429 |
+
res = other * td
|
| 430 |
+
assert type(res) is Timedelta
|
| 431 |
+
assert res == expected
|
| 432 |
+
|
| 433 |
+
def test_td_mul_td64_ndarray_invalid(self):
|
| 434 |
+
td = Timedelta("1 day")
|
| 435 |
+
other = np.array([Timedelta("2 Days").to_timedelta64()])
|
| 436 |
+
|
| 437 |
+
msg = (
|
| 438 |
+
"ufunc '?multiply'? cannot use operands with types "
|
| 439 |
+
rf"dtype\('{tm.ENDIAN}m8\[ns\]'\) and dtype\('{tm.ENDIAN}m8\[ns\]'\)"
|
| 440 |
+
)
|
| 441 |
+
with pytest.raises(TypeError, match=msg):
|
| 442 |
+
td * other
|
| 443 |
+
with pytest.raises(TypeError, match=msg):
|
| 444 |
+
other * td
|
| 445 |
+
|
| 446 |
+
# ---------------------------------------------------------------
|
| 447 |
+
# Timedelta.__div__, __truediv__
|
| 448 |
+
|
| 449 |
+
def test_td_div_timedeltalike_scalar(self):
|
| 450 |
+
# GH#19738
|
| 451 |
+
td = Timedelta(10, unit="d")
|
| 452 |
+
|
| 453 |
+
result = td / offsets.Hour(1)
|
| 454 |
+
assert result == 240
|
| 455 |
+
|
| 456 |
+
assert td / td == 1
|
| 457 |
+
assert td / np.timedelta64(60, "h") == 4
|
| 458 |
+
|
| 459 |
+
assert np.isnan(td / NaT)
|
| 460 |
+
|
| 461 |
+
def test_td_div_td64_non_nano(self):
|
| 462 |
+
# truediv
|
| 463 |
+
td = Timedelta("1 days 2 hours 3 ns")
|
| 464 |
+
result = td / np.timedelta64(1, "D")
|
| 465 |
+
assert result == td._value / (86400 * 10**9)
|
| 466 |
+
result = td / np.timedelta64(1, "s")
|
| 467 |
+
assert result == td._value / 10**9
|
| 468 |
+
result = td / np.timedelta64(1, "ns")
|
| 469 |
+
assert result == td._value
|
| 470 |
+
|
| 471 |
+
# floordiv
|
| 472 |
+
td = Timedelta("1 days 2 hours 3 ns")
|
| 473 |
+
result = td // np.timedelta64(1, "D")
|
| 474 |
+
assert result == 1
|
| 475 |
+
result = td // np.timedelta64(1, "s")
|
| 476 |
+
assert result == 93600
|
| 477 |
+
result = td // np.timedelta64(1, "ns")
|
| 478 |
+
assert result == td._value
|
| 479 |
+
|
| 480 |
+
def test_td_div_numeric_scalar(self):
|
| 481 |
+
# GH#19738
|
| 482 |
+
td = Timedelta(10, unit="d")
|
| 483 |
+
|
| 484 |
+
result = td / 2
|
| 485 |
+
assert isinstance(result, Timedelta)
|
| 486 |
+
assert result == Timedelta(days=5)
|
| 487 |
+
|
| 488 |
+
result = td / 5
|
| 489 |
+
assert isinstance(result, Timedelta)
|
| 490 |
+
assert result == Timedelta(days=2)
|
| 491 |
+
|
| 492 |
+
@pytest.mark.parametrize(
|
| 493 |
+
"nan",
|
| 494 |
+
[
|
| 495 |
+
np.nan,
|
| 496 |
+
np.float64("NaN"),
|
| 497 |
+
float("nan"),
|
| 498 |
+
],
|
| 499 |
+
)
|
| 500 |
+
def test_td_div_nan(self, nan):
|
| 501 |
+
# np.float64('NaN') has a 'dtype' attr, avoid treating as array
|
| 502 |
+
td = Timedelta(10, unit="d")
|
| 503 |
+
result = td / nan
|
| 504 |
+
assert result is NaT
|
| 505 |
+
|
| 506 |
+
result = td // nan
|
| 507 |
+
assert result is NaT
|
| 508 |
+
|
| 509 |
+
def test_td_div_td64_ndarray(self):
|
| 510 |
+
td = Timedelta("1 day")
|
| 511 |
+
|
| 512 |
+
other = np.array([Timedelta("2 Days").to_timedelta64()])
|
| 513 |
+
expected = np.array([0.5])
|
| 514 |
+
|
| 515 |
+
result = td / other
|
| 516 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 517 |
+
|
| 518 |
+
result = other / td
|
| 519 |
+
tm.assert_numpy_array_equal(result, expected * 4)
|
| 520 |
+
|
| 521 |
+
def test_td_div_ndarray_0d(self):
|
| 522 |
+
td = Timedelta("1 day")
|
| 523 |
+
|
| 524 |
+
other = np.array(1)
|
| 525 |
+
res = td / other
|
| 526 |
+
assert isinstance(res, Timedelta)
|
| 527 |
+
assert res == td
|
| 528 |
+
|
| 529 |
+
# ---------------------------------------------------------------
|
| 530 |
+
# Timedelta.__rdiv__
|
| 531 |
+
|
| 532 |
+
def test_td_rdiv_timedeltalike_scalar(self):
|
| 533 |
+
# GH#19738
|
| 534 |
+
td = Timedelta(10, unit="d")
|
| 535 |
+
result = offsets.Hour(1) / td
|
| 536 |
+
assert result == 1 / 240.0
|
| 537 |
+
|
| 538 |
+
assert np.timedelta64(60, "h") / td == 0.25
|
| 539 |
+
|
| 540 |
+
def test_td_rdiv_na_scalar(self):
|
| 541 |
+
# GH#31869 None gets cast to NaT
|
| 542 |
+
td = Timedelta(10, unit="d")
|
| 543 |
+
|
| 544 |
+
result = NaT / td
|
| 545 |
+
assert np.isnan(result)
|
| 546 |
+
|
| 547 |
+
result = None / td
|
| 548 |
+
assert np.isnan(result)
|
| 549 |
+
|
| 550 |
+
result = np.timedelta64("NaT") / td
|
| 551 |
+
assert np.isnan(result)
|
| 552 |
+
|
| 553 |
+
msg = r"unsupported operand type\(s\) for /: 'numpy.datetime64' and 'Timedelta'"
|
| 554 |
+
with pytest.raises(TypeError, match=msg):
|
| 555 |
+
np.datetime64("NaT") / td
|
| 556 |
+
|
| 557 |
+
msg = r"unsupported operand type\(s\) for /: 'float' and 'Timedelta'"
|
| 558 |
+
with pytest.raises(TypeError, match=msg):
|
| 559 |
+
np.nan / td
|
| 560 |
+
|
| 561 |
+
def test_td_rdiv_ndarray(self):
|
| 562 |
+
td = Timedelta(10, unit="d")
|
| 563 |
+
|
| 564 |
+
arr = np.array([td], dtype=object)
|
| 565 |
+
result = arr / td
|
| 566 |
+
expected = np.array([1], dtype=np.float64)
|
| 567 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 568 |
+
|
| 569 |
+
arr = np.array([None])
|
| 570 |
+
result = arr / td
|
| 571 |
+
expected = np.array([np.nan])
|
| 572 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 573 |
+
|
| 574 |
+
arr = np.array([np.nan], dtype=object)
|
| 575 |
+
msg = r"unsupported operand type\(s\) for /: 'float' and 'Timedelta'"
|
| 576 |
+
with pytest.raises(TypeError, match=msg):
|
| 577 |
+
arr / td
|
| 578 |
+
|
| 579 |
+
arr = np.array([np.nan], dtype=np.float64)
|
| 580 |
+
msg = "cannot use operands with types dtype"
|
| 581 |
+
with pytest.raises(TypeError, match=msg):
|
| 582 |
+
arr / td
|
| 583 |
+
|
| 584 |
+
def test_td_rdiv_ndarray_0d(self):
|
| 585 |
+
td = Timedelta(10, unit="d")
|
| 586 |
+
|
| 587 |
+
arr = np.array(td.asm8)
|
| 588 |
+
|
| 589 |
+
assert arr / td == 1
|
| 590 |
+
|
| 591 |
+
# ---------------------------------------------------------------
|
| 592 |
+
# Timedelta.__floordiv__
|
| 593 |
+
|
| 594 |
+
def test_td_floordiv_timedeltalike_scalar(self):
|
| 595 |
+
# GH#18846
|
| 596 |
+
td = Timedelta(hours=3, minutes=4)
|
| 597 |
+
scalar = Timedelta(hours=3, minutes=3)
|
| 598 |
+
|
| 599 |
+
assert td // scalar == 1
|
| 600 |
+
assert -td // scalar.to_pytimedelta() == -2
|
| 601 |
+
assert (2 * td) // scalar.to_timedelta64() == 2
|
| 602 |
+
|
| 603 |
+
def test_td_floordiv_null_scalar(self):
|
| 604 |
+
# GH#18846
|
| 605 |
+
td = Timedelta(hours=3, minutes=4)
|
| 606 |
+
|
| 607 |
+
assert td // np.nan is NaT
|
| 608 |
+
assert np.isnan(td // NaT)
|
| 609 |
+
assert np.isnan(td // np.timedelta64("NaT"))
|
| 610 |
+
|
| 611 |
+
def test_td_floordiv_offsets(self):
|
| 612 |
+
# GH#19738
|
| 613 |
+
td = Timedelta(hours=3, minutes=4)
|
| 614 |
+
assert td // offsets.Hour(1) == 3
|
| 615 |
+
assert td // offsets.Minute(2) == 92
|
| 616 |
+
|
| 617 |
+
def test_td_floordiv_invalid_scalar(self):
|
| 618 |
+
# GH#18846
|
| 619 |
+
td = Timedelta(hours=3, minutes=4)
|
| 620 |
+
|
| 621 |
+
msg = "|".join(
|
| 622 |
+
[
|
| 623 |
+
r"Invalid dtype datetime64\[D\] for __floordiv__",
|
| 624 |
+
"'dtype' is an invalid keyword argument for this function",
|
| 625 |
+
"this function got an unexpected keyword argument 'dtype'",
|
| 626 |
+
r"ufunc '?floor_divide'? cannot use operands with types",
|
| 627 |
+
]
|
| 628 |
+
)
|
| 629 |
+
with pytest.raises(TypeError, match=msg):
|
| 630 |
+
td // np.datetime64("2016-01-01", dtype="datetime64[us]")
|
| 631 |
+
|
| 632 |
+
def test_td_floordiv_numeric_scalar(self):
|
| 633 |
+
# GH#18846
|
| 634 |
+
td = Timedelta(hours=3, minutes=4)
|
| 635 |
+
|
| 636 |
+
expected = Timedelta(hours=1, minutes=32)
|
| 637 |
+
assert td // 2 == expected
|
| 638 |
+
assert td // 2.0 == expected
|
| 639 |
+
assert td // np.float64(2.0) == expected
|
| 640 |
+
assert td // np.int32(2.0) == expected
|
| 641 |
+
assert td // np.uint8(2.0) == expected
|
| 642 |
+
|
| 643 |
+
def test_td_floordiv_timedeltalike_array(self):
|
| 644 |
+
# GH#18846
|
| 645 |
+
td = Timedelta(hours=3, minutes=4)
|
| 646 |
+
scalar = Timedelta(hours=3, minutes=3)
|
| 647 |
+
|
| 648 |
+
# Array-like others
|
| 649 |
+
assert td // np.array(scalar.to_timedelta64()) == 1
|
| 650 |
+
|
| 651 |
+
res = (3 * td) // np.array([scalar.to_timedelta64()])
|
| 652 |
+
expected = np.array([3], dtype=np.int64)
|
| 653 |
+
tm.assert_numpy_array_equal(res, expected)
|
| 654 |
+
|
| 655 |
+
res = (10 * td) // np.array([scalar.to_timedelta64(), np.timedelta64("NaT")])
|
| 656 |
+
expected = np.array([10, np.nan])
|
| 657 |
+
tm.assert_numpy_array_equal(res, expected)
|
| 658 |
+
|
| 659 |
+
def test_td_floordiv_numeric_series(self):
|
| 660 |
+
# GH#18846
|
| 661 |
+
td = Timedelta(hours=3, minutes=4)
|
| 662 |
+
ser = pd.Series([1], dtype=np.int64)
|
| 663 |
+
res = td // ser
|
| 664 |
+
assert res.dtype.kind == "m"
|
| 665 |
+
|
| 666 |
+
# ---------------------------------------------------------------
|
| 667 |
+
# Timedelta.__rfloordiv__
|
| 668 |
+
|
| 669 |
+
def test_td_rfloordiv_timedeltalike_scalar(self):
|
| 670 |
+
# GH#18846
|
| 671 |
+
td = Timedelta(hours=3, minutes=3)
|
| 672 |
+
scalar = Timedelta(hours=3, minutes=4)
|
| 673 |
+
|
| 674 |
+
# scalar others
|
| 675 |
+
# x // Timedelta is defined only for timedelta-like x. int-like,
|
| 676 |
+
# float-like, and date-like, in particular, should all either
|
| 677 |
+
# a) raise TypeError directly or
|
| 678 |
+
# b) return NotImplemented, following which the reversed
|
| 679 |
+
# operation will raise TypeError.
|
| 680 |
+
assert td.__rfloordiv__(scalar) == 1
|
| 681 |
+
assert (-td).__rfloordiv__(scalar.to_pytimedelta()) == -2
|
| 682 |
+
assert (2 * td).__rfloordiv__(scalar.to_timedelta64()) == 0
|
| 683 |
+
|
| 684 |
+
def test_td_rfloordiv_null_scalar(self):
|
| 685 |
+
# GH#18846
|
| 686 |
+
td = Timedelta(hours=3, minutes=3)
|
| 687 |
+
|
| 688 |
+
assert np.isnan(td.__rfloordiv__(NaT))
|
| 689 |
+
assert np.isnan(td.__rfloordiv__(np.timedelta64("NaT")))
|
| 690 |
+
|
| 691 |
+
def test_td_rfloordiv_offsets(self):
|
| 692 |
+
# GH#19738
|
| 693 |
+
assert offsets.Hour(1) // Timedelta(minutes=25) == 2
|
| 694 |
+
|
| 695 |
+
def test_td_rfloordiv_invalid_scalar(self):
|
| 696 |
+
# GH#18846
|
| 697 |
+
td = Timedelta(hours=3, minutes=3)
|
| 698 |
+
|
| 699 |
+
dt64 = np.datetime64("2016-01-01", "us")
|
| 700 |
+
|
| 701 |
+
assert td.__rfloordiv__(dt64) is NotImplemented
|
| 702 |
+
|
| 703 |
+
msg = (
|
| 704 |
+
r"unsupported operand type\(s\) for //: 'numpy.datetime64' and 'Timedelta'"
|
| 705 |
+
)
|
| 706 |
+
with pytest.raises(TypeError, match=msg):
|
| 707 |
+
dt64 // td
|
| 708 |
+
|
| 709 |
+
def test_td_rfloordiv_numeric_scalar(self):
|
| 710 |
+
# GH#18846
|
| 711 |
+
td = Timedelta(hours=3, minutes=3)
|
| 712 |
+
|
| 713 |
+
assert td.__rfloordiv__(np.nan) is NotImplemented
|
| 714 |
+
assert td.__rfloordiv__(3.5) is NotImplemented
|
| 715 |
+
assert td.__rfloordiv__(2) is NotImplemented
|
| 716 |
+
assert td.__rfloordiv__(np.float64(2.0)) is NotImplemented
|
| 717 |
+
assert td.__rfloordiv__(np.uint8(9)) is NotImplemented
|
| 718 |
+
assert td.__rfloordiv__(np.int32(2.0)) is NotImplemented
|
| 719 |
+
|
| 720 |
+
msg = r"unsupported operand type\(s\) for //: '.*' and 'Timedelta"
|
| 721 |
+
with pytest.raises(TypeError, match=msg):
|
| 722 |
+
np.float64(2.0) // td
|
| 723 |
+
with pytest.raises(TypeError, match=msg):
|
| 724 |
+
np.uint8(9) // td
|
| 725 |
+
with pytest.raises(TypeError, match=msg):
|
| 726 |
+
# deprecated GH#19761, enforced GH#29797
|
| 727 |
+
np.int32(2.0) // td
|
| 728 |
+
|
| 729 |
+
def test_td_rfloordiv_timedeltalike_array(self):
|
| 730 |
+
# GH#18846
|
| 731 |
+
td = Timedelta(hours=3, minutes=3)
|
| 732 |
+
scalar = Timedelta(hours=3, minutes=4)
|
| 733 |
+
|
| 734 |
+
# Array-like others
|
| 735 |
+
assert td.__rfloordiv__(np.array(scalar.to_timedelta64())) == 1
|
| 736 |
+
|
| 737 |
+
res = td.__rfloordiv__(np.array([(3 * scalar).to_timedelta64()]))
|
| 738 |
+
expected = np.array([3], dtype=np.int64)
|
| 739 |
+
tm.assert_numpy_array_equal(res, expected)
|
| 740 |
+
|
| 741 |
+
arr = np.array([(10 * scalar).to_timedelta64(), np.timedelta64("NaT")])
|
| 742 |
+
res = td.__rfloordiv__(arr)
|
| 743 |
+
expected = np.array([10, np.nan])
|
| 744 |
+
tm.assert_numpy_array_equal(res, expected)
|
| 745 |
+
|
| 746 |
+
def test_td_rfloordiv_intarray(self):
|
| 747 |
+
# deprecated GH#19761, enforced GH#29797
|
| 748 |
+
ints = np.array([1349654400, 1349740800, 1349827200, 1349913600]) * 10**9
|
| 749 |
+
|
| 750 |
+
msg = "Invalid dtype"
|
| 751 |
+
with pytest.raises(TypeError, match=msg):
|
| 752 |
+
ints // Timedelta(1, unit="s")
|
| 753 |
+
|
| 754 |
+
def test_td_rfloordiv_numeric_series(self):
|
| 755 |
+
# GH#18846
|
| 756 |
+
td = Timedelta(hours=3, minutes=3)
|
| 757 |
+
ser = pd.Series([1], dtype=np.int64)
|
| 758 |
+
res = td.__rfloordiv__(ser)
|
| 759 |
+
assert res is NotImplemented
|
| 760 |
+
|
| 761 |
+
msg = "Invalid dtype"
|
| 762 |
+
with pytest.raises(TypeError, match=msg):
|
| 763 |
+
# Deprecated GH#19761, enforced GH#29797
|
| 764 |
+
ser // td
|
| 765 |
+
|
| 766 |
+
# ----------------------------------------------------------------
|
| 767 |
+
# Timedelta.__mod__, __rmod__
|
| 768 |
+
|
| 769 |
+
def test_mod_timedeltalike(self):
|
| 770 |
+
# GH#19365
|
| 771 |
+
td = Timedelta(hours=37)
|
| 772 |
+
|
| 773 |
+
# Timedelta-like others
|
| 774 |
+
result = td % Timedelta(hours=6)
|
| 775 |
+
assert isinstance(result, Timedelta)
|
| 776 |
+
assert result == Timedelta(hours=1)
|
| 777 |
+
|
| 778 |
+
result = td % timedelta(minutes=60)
|
| 779 |
+
assert isinstance(result, Timedelta)
|
| 780 |
+
assert result == Timedelta(0)
|
| 781 |
+
|
| 782 |
+
result = td % NaT
|
| 783 |
+
assert result is NaT
|
| 784 |
+
|
| 785 |
+
def test_mod_timedelta64_nat(self):
|
| 786 |
+
# GH#19365
|
| 787 |
+
td = Timedelta(hours=37)
|
| 788 |
+
|
| 789 |
+
result = td % np.timedelta64("NaT", "ns")
|
| 790 |
+
assert result is NaT
|
| 791 |
+
|
| 792 |
+
def test_mod_timedelta64(self):
|
| 793 |
+
# GH#19365
|
| 794 |
+
td = Timedelta(hours=37)
|
| 795 |
+
|
| 796 |
+
result = td % np.timedelta64(2, "h")
|
| 797 |
+
assert isinstance(result, Timedelta)
|
| 798 |
+
assert result == Timedelta(hours=1)
|
| 799 |
+
|
| 800 |
+
def test_mod_offset(self):
|
| 801 |
+
# GH#19365
|
| 802 |
+
td = Timedelta(hours=37)
|
| 803 |
+
|
| 804 |
+
result = td % offsets.Hour(5)
|
| 805 |
+
assert isinstance(result, Timedelta)
|
| 806 |
+
assert result == Timedelta(hours=2)
|
| 807 |
+
|
| 808 |
+
def test_mod_numeric(self):
|
| 809 |
+
# GH#19365
|
| 810 |
+
td = Timedelta(hours=37)
|
| 811 |
+
|
| 812 |
+
# Numeric Others
|
| 813 |
+
result = td % 2
|
| 814 |
+
assert isinstance(result, Timedelta)
|
| 815 |
+
assert result == Timedelta(0)
|
| 816 |
+
|
| 817 |
+
result = td % 1e12
|
| 818 |
+
assert isinstance(result, Timedelta)
|
| 819 |
+
assert result == Timedelta(minutes=3, seconds=20)
|
| 820 |
+
|
| 821 |
+
result = td % int(1e12)
|
| 822 |
+
assert isinstance(result, Timedelta)
|
| 823 |
+
assert result == Timedelta(minutes=3, seconds=20)
|
| 824 |
+
|
| 825 |
+
def test_mod_invalid(self):
|
| 826 |
+
# GH#19365
|
| 827 |
+
td = Timedelta(hours=37)
|
| 828 |
+
msg = "unsupported operand type"
|
| 829 |
+
with pytest.raises(TypeError, match=msg):
|
| 830 |
+
td % Timestamp("2018-01-22")
|
| 831 |
+
|
| 832 |
+
with pytest.raises(TypeError, match=msg):
|
| 833 |
+
td % []
|
| 834 |
+
|
| 835 |
+
def test_rmod_pytimedelta(self):
|
| 836 |
+
# GH#19365
|
| 837 |
+
td = Timedelta(minutes=3)
|
| 838 |
+
|
| 839 |
+
result = timedelta(minutes=4) % td
|
| 840 |
+
assert isinstance(result, Timedelta)
|
| 841 |
+
assert result == Timedelta(minutes=1)
|
| 842 |
+
|
| 843 |
+
def test_rmod_timedelta64(self):
|
| 844 |
+
# GH#19365
|
| 845 |
+
td = Timedelta(minutes=3)
|
| 846 |
+
result = np.timedelta64(5, "m") % td
|
| 847 |
+
assert isinstance(result, Timedelta)
|
| 848 |
+
assert result == Timedelta(minutes=2)
|
| 849 |
+
|
| 850 |
+
def test_rmod_invalid(self):
|
| 851 |
+
# GH#19365
|
| 852 |
+
td = Timedelta(minutes=3)
|
| 853 |
+
|
| 854 |
+
msg = "unsupported operand"
|
| 855 |
+
with pytest.raises(TypeError, match=msg):
|
| 856 |
+
Timestamp("2018-01-22") % td
|
| 857 |
+
|
| 858 |
+
with pytest.raises(TypeError, match=msg):
|
| 859 |
+
15 % td
|
| 860 |
+
|
| 861 |
+
with pytest.raises(TypeError, match=msg):
|
| 862 |
+
16.0 % td
|
| 863 |
+
|
| 864 |
+
msg = "Invalid dtype int"
|
| 865 |
+
with pytest.raises(TypeError, match=msg):
|
| 866 |
+
np.array([22, 24]) % td
|
| 867 |
+
|
| 868 |
+
# ----------------------------------------------------------------
|
| 869 |
+
# Timedelta.__divmod__, __rdivmod__
|
| 870 |
+
|
| 871 |
+
def test_divmod_numeric(self):
|
| 872 |
+
# GH#19365
|
| 873 |
+
td = Timedelta(days=2, hours=6)
|
| 874 |
+
|
| 875 |
+
result = divmod(td, 53 * 3600 * 1e9)
|
| 876 |
+
assert result[0] == Timedelta(1, unit="ns")
|
| 877 |
+
assert isinstance(result[1], Timedelta)
|
| 878 |
+
assert result[1] == Timedelta(hours=1)
|
| 879 |
+
|
| 880 |
+
assert result
|
| 881 |
+
result = divmod(td, np.nan)
|
| 882 |
+
assert result[0] is NaT
|
| 883 |
+
assert result[1] is NaT
|
| 884 |
+
|
| 885 |
+
def test_divmod(self):
|
| 886 |
+
# GH#19365
|
| 887 |
+
td = Timedelta(days=2, hours=6)
|
| 888 |
+
|
| 889 |
+
result = divmod(td, timedelta(days=1))
|
| 890 |
+
assert result[0] == 2
|
| 891 |
+
assert isinstance(result[1], Timedelta)
|
| 892 |
+
assert result[1] == Timedelta(hours=6)
|
| 893 |
+
|
| 894 |
+
result = divmod(td, 54)
|
| 895 |
+
assert result[0] == Timedelta(hours=1)
|
| 896 |
+
assert isinstance(result[1], Timedelta)
|
| 897 |
+
assert result[1] == Timedelta(0)
|
| 898 |
+
|
| 899 |
+
result = divmod(td, NaT)
|
| 900 |
+
assert np.isnan(result[0])
|
| 901 |
+
assert result[1] is NaT
|
| 902 |
+
|
| 903 |
+
def test_divmod_offset(self):
|
| 904 |
+
# GH#19365
|
| 905 |
+
td = Timedelta(days=2, hours=6)
|
| 906 |
+
|
| 907 |
+
result = divmod(td, offsets.Hour(-4))
|
| 908 |
+
assert result[0] == -14
|
| 909 |
+
assert isinstance(result[1], Timedelta)
|
| 910 |
+
assert result[1] == Timedelta(hours=-2)
|
| 911 |
+
|
| 912 |
+
def test_divmod_invalid(self):
|
| 913 |
+
# GH#19365
|
| 914 |
+
td = Timedelta(days=2, hours=6)
|
| 915 |
+
|
| 916 |
+
msg = r"unsupported operand type\(s\) for //: 'Timedelta' and 'Timestamp'"
|
| 917 |
+
with pytest.raises(TypeError, match=msg):
|
| 918 |
+
divmod(td, Timestamp("2018-01-22"))
|
| 919 |
+
|
| 920 |
+
def test_rdivmod_pytimedelta(self):
|
| 921 |
+
# GH#19365
|
| 922 |
+
result = divmod(timedelta(days=2, hours=6), Timedelta(days=1))
|
| 923 |
+
assert result[0] == 2
|
| 924 |
+
assert isinstance(result[1], Timedelta)
|
| 925 |
+
assert result[1] == Timedelta(hours=6)
|
| 926 |
+
|
| 927 |
+
def test_rdivmod_offset(self):
|
| 928 |
+
result = divmod(offsets.Hour(54), Timedelta(hours=-4))
|
| 929 |
+
assert result[0] == -14
|
| 930 |
+
assert isinstance(result[1], Timedelta)
|
| 931 |
+
assert result[1] == Timedelta(hours=-2)
|
| 932 |
+
|
| 933 |
+
def test_rdivmod_invalid(self):
|
| 934 |
+
# GH#19365
|
| 935 |
+
td = Timedelta(minutes=3)
|
| 936 |
+
msg = "unsupported operand type"
|
| 937 |
+
|
| 938 |
+
with pytest.raises(TypeError, match=msg):
|
| 939 |
+
divmod(Timestamp("2018-01-22"), td)
|
| 940 |
+
|
| 941 |
+
with pytest.raises(TypeError, match=msg):
|
| 942 |
+
divmod(15, td)
|
| 943 |
+
|
| 944 |
+
with pytest.raises(TypeError, match=msg):
|
| 945 |
+
divmod(16.0, td)
|
| 946 |
+
|
| 947 |
+
msg = "Invalid dtype int"
|
| 948 |
+
with pytest.raises(TypeError, match=msg):
|
| 949 |
+
divmod(np.array([22, 24]), td)
|
| 950 |
+
|
| 951 |
+
# ----------------------------------------------------------------
|
| 952 |
+
|
| 953 |
+
@pytest.mark.parametrize(
|
| 954 |
+
"op", [operator.mul, ops.rmul, operator.truediv, ops.rdiv, ops.rsub]
|
| 955 |
+
)
|
| 956 |
+
@pytest.mark.parametrize(
|
| 957 |
+
"arr",
|
| 958 |
+
[
|
| 959 |
+
np.array([Timestamp("20130101 9:01"), Timestamp("20121230 9:02")]),
|
| 960 |
+
np.array([Timestamp("2021-11-09 09:54:00"), Timedelta("1D")]),
|
| 961 |
+
],
|
| 962 |
+
)
|
| 963 |
+
def test_td_op_timedelta_timedeltalike_array(self, op, arr):
|
| 964 |
+
msg = "unsupported operand type|cannot use operands with types"
|
| 965 |
+
with pytest.raises(TypeError, match=msg):
|
| 966 |
+
op(arr, Timedelta("1D"))
|
| 967 |
+
|
| 968 |
+
|
| 969 |
+
class TestTimedeltaComparison:
|
| 970 |
+
@pytest.mark.skip_ubsan
|
| 971 |
+
def test_compare_pytimedelta_bounds(self):
|
| 972 |
+
# GH#49021 don't overflow on comparison with very large pytimedeltas
|
| 973 |
+
|
| 974 |
+
for unit in ["ns", "us"]:
|
| 975 |
+
tdmax = Timedelta.max.as_unit(unit).max
|
| 976 |
+
tdmin = Timedelta.min.as_unit(unit).min
|
| 977 |
+
|
| 978 |
+
assert tdmax < timedelta.max
|
| 979 |
+
assert tdmax <= timedelta.max
|
| 980 |
+
assert not tdmax > timedelta.max
|
| 981 |
+
assert not tdmax >= timedelta.max
|
| 982 |
+
assert tdmax != timedelta.max
|
| 983 |
+
assert not tdmax == timedelta.max
|
| 984 |
+
|
| 985 |
+
assert tdmin > timedelta.min
|
| 986 |
+
assert tdmin >= timedelta.min
|
| 987 |
+
assert not tdmin < timedelta.min
|
| 988 |
+
assert not tdmin <= timedelta.min
|
| 989 |
+
assert tdmin != timedelta.min
|
| 990 |
+
assert not tdmin == timedelta.min
|
| 991 |
+
|
| 992 |
+
# But the "ms" and "s"-reso bounds extend pass pytimedelta
|
| 993 |
+
for unit in ["ms", "s"]:
|
| 994 |
+
tdmax = Timedelta.max.as_unit(unit).max
|
| 995 |
+
tdmin = Timedelta.min.as_unit(unit).min
|
| 996 |
+
|
| 997 |
+
assert tdmax > timedelta.max
|
| 998 |
+
assert tdmax >= timedelta.max
|
| 999 |
+
assert not tdmax < timedelta.max
|
| 1000 |
+
assert not tdmax <= timedelta.max
|
| 1001 |
+
assert tdmax != timedelta.max
|
| 1002 |
+
assert not tdmax == timedelta.max
|
| 1003 |
+
|
| 1004 |
+
assert tdmin < timedelta.min
|
| 1005 |
+
assert tdmin <= timedelta.min
|
| 1006 |
+
assert not tdmin > timedelta.min
|
| 1007 |
+
assert not tdmin >= timedelta.min
|
| 1008 |
+
assert tdmin != timedelta.min
|
| 1009 |
+
assert not tdmin == timedelta.min
|
| 1010 |
+
|
| 1011 |
+
def test_compare_pytimedelta_bounds2(self):
|
| 1012 |
+
# a pytimedelta outside the microsecond bounds
|
| 1013 |
+
pytd = timedelta(days=999999999, seconds=86399)
|
| 1014 |
+
# NB: np.timedelta64(td, "s"") incorrectly overflows
|
| 1015 |
+
td64 = np.timedelta64(pytd.days, "D") + np.timedelta64(pytd.seconds, "s")
|
| 1016 |
+
td = Timedelta(td64)
|
| 1017 |
+
assert td.days == pytd.days
|
| 1018 |
+
assert td.seconds == pytd.seconds
|
| 1019 |
+
|
| 1020 |
+
assert td == pytd
|
| 1021 |
+
assert not td != pytd
|
| 1022 |
+
assert not td < pytd
|
| 1023 |
+
assert not td > pytd
|
| 1024 |
+
assert td <= pytd
|
| 1025 |
+
assert td >= pytd
|
| 1026 |
+
|
| 1027 |
+
td2 = td - Timedelta(seconds=1).as_unit("s")
|
| 1028 |
+
assert td2 != pytd
|
| 1029 |
+
assert not td2 == pytd
|
| 1030 |
+
assert td2 < pytd
|
| 1031 |
+
assert td2 <= pytd
|
| 1032 |
+
assert not td2 > pytd
|
| 1033 |
+
assert not td2 >= pytd
|
| 1034 |
+
|
| 1035 |
+
def test_compare_tick(self, tick_classes):
|
| 1036 |
+
cls = tick_classes
|
| 1037 |
+
|
| 1038 |
+
off = cls(4)
|
| 1039 |
+
td = off._as_pd_timedelta
|
| 1040 |
+
assert isinstance(td, Timedelta)
|
| 1041 |
+
|
| 1042 |
+
assert td == off
|
| 1043 |
+
assert not td != off
|
| 1044 |
+
assert td <= off
|
| 1045 |
+
assert td >= off
|
| 1046 |
+
assert not td < off
|
| 1047 |
+
assert not td > off
|
| 1048 |
+
|
| 1049 |
+
assert not td == 2 * off
|
| 1050 |
+
assert td != 2 * off
|
| 1051 |
+
assert td <= 2 * off
|
| 1052 |
+
assert td < 2 * off
|
| 1053 |
+
assert not td >= 2 * off
|
| 1054 |
+
assert not td > 2 * off
|
| 1055 |
+
|
| 1056 |
+
def test_comparison_object_array(self):
|
| 1057 |
+
# analogous to GH#15183
|
| 1058 |
+
td = Timedelta("2 days")
|
| 1059 |
+
other = Timedelta("3 hours")
|
| 1060 |
+
|
| 1061 |
+
arr = np.array([other, td], dtype=object)
|
| 1062 |
+
res = arr == td
|
| 1063 |
+
expected = np.array([False, True], dtype=bool)
|
| 1064 |
+
assert (res == expected).all()
|
| 1065 |
+
|
| 1066 |
+
# 2D case
|
| 1067 |
+
arr = np.array([[other, td], [td, other]], dtype=object)
|
| 1068 |
+
res = arr != td
|
| 1069 |
+
expected = np.array([[True, False], [False, True]], dtype=bool)
|
| 1070 |
+
assert res.shape == expected.shape
|
| 1071 |
+
assert (res == expected).all()
|
| 1072 |
+
|
| 1073 |
+
def test_compare_timedelta_ndarray(self):
|
| 1074 |
+
# GH#11835
|
| 1075 |
+
periods = [Timedelta("0 days 01:00:00"), Timedelta("0 days 01:00:00")]
|
| 1076 |
+
arr = np.array(periods)
|
| 1077 |
+
result = arr[0] > arr
|
| 1078 |
+
expected = np.array([False, False])
|
| 1079 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 1080 |
+
|
| 1081 |
+
def test_compare_td64_ndarray(self):
|
| 1082 |
+
# GG#33441
|
| 1083 |
+
arr = np.arange(5).astype("timedelta64[ns]")
|
| 1084 |
+
td = Timedelta(arr[1])
|
| 1085 |
+
|
| 1086 |
+
expected = np.array([False, True, False, False, False], dtype=bool)
|
| 1087 |
+
|
| 1088 |
+
result = td == arr
|
| 1089 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 1090 |
+
|
| 1091 |
+
result = arr == td
|
| 1092 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 1093 |
+
|
| 1094 |
+
result = td != arr
|
| 1095 |
+
tm.assert_numpy_array_equal(result, ~expected)
|
| 1096 |
+
|
| 1097 |
+
result = arr != td
|
| 1098 |
+
tm.assert_numpy_array_equal(result, ~expected)
|
| 1099 |
+
|
| 1100 |
+
def test_compare_custom_object(self):
|
| 1101 |
+
"""
|
| 1102 |
+
Make sure non supported operations on Timedelta returns NonImplemented
|
| 1103 |
+
and yields to other operand (GH#20829).
|
| 1104 |
+
"""
|
| 1105 |
+
|
| 1106 |
+
class CustomClass:
|
| 1107 |
+
def __init__(self, cmp_result=None) -> None:
|
| 1108 |
+
self.cmp_result = cmp_result
|
| 1109 |
+
|
| 1110 |
+
def generic_result(self):
|
| 1111 |
+
if self.cmp_result is None:
|
| 1112 |
+
return NotImplemented
|
| 1113 |
+
else:
|
| 1114 |
+
return self.cmp_result
|
| 1115 |
+
|
| 1116 |
+
def __eq__(self, other):
|
| 1117 |
+
return self.generic_result()
|
| 1118 |
+
|
| 1119 |
+
def __gt__(self, other):
|
| 1120 |
+
return self.generic_result()
|
| 1121 |
+
|
| 1122 |
+
t = Timedelta("1s")
|
| 1123 |
+
|
| 1124 |
+
assert t != "string"
|
| 1125 |
+
assert t != 1
|
| 1126 |
+
assert t != CustomClass()
|
| 1127 |
+
assert t != CustomClass(cmp_result=False)
|
| 1128 |
+
|
| 1129 |
+
assert t < CustomClass(cmp_result=True)
|
| 1130 |
+
assert not t < CustomClass(cmp_result=False)
|
| 1131 |
+
|
| 1132 |
+
assert t == CustomClass(cmp_result=True)
|
| 1133 |
+
|
| 1134 |
+
@pytest.mark.parametrize("val", ["string", 1])
|
| 1135 |
+
def test_compare_unknown_type(self, val):
|
| 1136 |
+
# GH#20829
|
| 1137 |
+
t = Timedelta("1s")
|
| 1138 |
+
msg = "not supported between instances of 'Timedelta' and '(int|str)'"
|
| 1139 |
+
with pytest.raises(TypeError, match=msg):
|
| 1140 |
+
t >= val
|
| 1141 |
+
with pytest.raises(TypeError, match=msg):
|
| 1142 |
+
t > val
|
| 1143 |
+
with pytest.raises(TypeError, match=msg):
|
| 1144 |
+
t <= val
|
| 1145 |
+
with pytest.raises(TypeError, match=msg):
|
| 1146 |
+
t < val
|
| 1147 |
+
|
| 1148 |
+
|
| 1149 |
+
def test_ops_notimplemented():
    # Arithmetic against a type Timedelta knows nothing about must return
    # NotImplemented so Python can try the reflected operation.
    class Other:
        pass

    stranger = Other()
    td = Timedelta("1 day")

    for dunder in ("__add__", "__sub__", "__truediv__", "__mul__", "__floordiv__"):
        assert getattr(td, dunder)(stranger) is NotImplemented
|
| 1161 |
+
|
| 1162 |
+
|
| 1163 |
+
def test_ops_error_str():
    # GH#13624: mixing Timedelta with str raises TypeError for + and >,
    # while ==/!= simply compare unequal; both operand orders are checked.
    td = Timedelta("1 day")
    add_msg = "|".join(
        [
            "unsupported operand type",
            r'can only concatenate str \(not "Timedelta"\) to str',
            "must be str, not Timedelta",
        ]
    )

    for left, right in [(td, "a"), ("a", td)]:
        with pytest.raises(TypeError, match=add_msg):
            left + right

        with pytest.raises(TypeError, match="not supported between instances of"):
            left > right

        assert not left == right  # pylint: disable=unneeded-not
        assert left != right
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/test_formats.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from pandas import Timedelta
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@pytest.mark.parametrize(
    "td, expected_repr",
    [
        # repr always renders "<days> days HH:MM:SS[.ffffff]"; negative
        # values roll under to "-1 days +<clock>".
        (Timedelta(10, unit="d"), "Timedelta('10 days 00:00:00')"),
        (Timedelta(10, unit="s"), "Timedelta('0 days 00:00:10')"),
        (Timedelta(10, unit="ms"), "Timedelta('0 days 00:00:00.010000')"),
        (Timedelta(-10, unit="ms"), "Timedelta('-1 days +23:59:59.990000')"),
    ],
)
def test_repr(td, expected_repr):
    """Check the canonical repr of a Timedelta."""
    assert repr(td) == expected_repr
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@pytest.mark.parametrize(
|
| 20 |
+
"td, expected_iso",
|
| 21 |
+
[
|
| 22 |
+
(
|
| 23 |
+
Timedelta(
|
| 24 |
+
days=6,
|
| 25 |
+
minutes=50,
|
| 26 |
+
seconds=3,
|
| 27 |
+
milliseconds=10,
|
| 28 |
+
microseconds=10,
|
| 29 |
+
nanoseconds=12,
|
| 30 |
+
),
|
| 31 |
+
"P6DT0H50M3.010010012S",
|
| 32 |
+
),
|
| 33 |
+
(Timedelta(days=4, hours=12, minutes=30, seconds=5), "P4DT12H30M5S"),
|
| 34 |
+
(Timedelta(nanoseconds=123), "P0DT0H0M0.000000123S"),
|
| 35 |
+
# trim nano
|
| 36 |
+
(Timedelta(microseconds=10), "P0DT0H0M0.00001S"),
|
| 37 |
+
# trim micro
|
| 38 |
+
(Timedelta(milliseconds=1), "P0DT0H0M0.001S"),
|
| 39 |
+
# don't strip every 0
|
| 40 |
+
(Timedelta(minutes=1), "P0DT0H1M0S"),
|
| 41 |
+
],
|
| 42 |
+
)
|
| 43 |
+
def test_isoformat(td, expected_iso):
|
| 44 |
+
assert td.isoformat() == expected_iso
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class TestReprBase:
|
| 48 |
+
def test_none(self):
|
| 49 |
+
delta_1d = Timedelta(1, unit="D")
|
| 50 |
+
delta_0d = Timedelta(0, unit="D")
|
| 51 |
+
delta_1s = Timedelta(1, unit="s")
|
| 52 |
+
delta_500ms = Timedelta(500, unit="ms")
|
| 53 |
+
|
| 54 |
+
drepr = lambda x: x._repr_base()
|
| 55 |
+
assert drepr(delta_1d) == "1 days"
|
| 56 |
+
assert drepr(-delta_1d) == "-1 days"
|
| 57 |
+
assert drepr(delta_0d) == "0 days"
|
| 58 |
+
assert drepr(delta_1s) == "0 days 00:00:01"
|
| 59 |
+
assert drepr(delta_500ms) == "0 days 00:00:00.500000"
|
| 60 |
+
assert drepr(delta_1d + delta_1s) == "1 days 00:00:01"
|
| 61 |
+
assert drepr(-delta_1d + delta_1s) == "-1 days +00:00:01"
|
| 62 |
+
assert drepr(delta_1d + delta_500ms) == "1 days 00:00:00.500000"
|
| 63 |
+
assert drepr(-delta_1d + delta_500ms) == "-1 days +00:00:00.500000"
|
| 64 |
+
|
| 65 |
+
def test_sub_day(self):
|
| 66 |
+
delta_1d = Timedelta(1, unit="D")
|
| 67 |
+
delta_0d = Timedelta(0, unit="D")
|
| 68 |
+
delta_1s = Timedelta(1, unit="s")
|
| 69 |
+
delta_500ms = Timedelta(500, unit="ms")
|
| 70 |
+
|
| 71 |
+
drepr = lambda x: x._repr_base(format="sub_day")
|
| 72 |
+
assert drepr(delta_1d) == "1 days"
|
| 73 |
+
assert drepr(-delta_1d) == "-1 days"
|
| 74 |
+
assert drepr(delta_0d) == "00:00:00"
|
| 75 |
+
assert drepr(delta_1s) == "00:00:01"
|
| 76 |
+
assert drepr(delta_500ms) == "00:00:00.500000"
|
| 77 |
+
assert drepr(delta_1d + delta_1s) == "1 days 00:00:01"
|
| 78 |
+
assert drepr(-delta_1d + delta_1s) == "-1 days +00:00:01"
|
| 79 |
+
assert drepr(delta_1d + delta_500ms) == "1 days 00:00:00.500000"
|
| 80 |
+
assert drepr(-delta_1d + delta_500ms) == "-1 days +00:00:00.500000"
|
| 81 |
+
|
| 82 |
+
def test_long(self):
|
| 83 |
+
delta_1d = Timedelta(1, unit="D")
|
| 84 |
+
delta_0d = Timedelta(0, unit="D")
|
| 85 |
+
delta_1s = Timedelta(1, unit="s")
|
| 86 |
+
delta_500ms = Timedelta(500, unit="ms")
|
| 87 |
+
|
| 88 |
+
drepr = lambda x: x._repr_base(format="long")
|
| 89 |
+
assert drepr(delta_1d) == "1 days 00:00:00"
|
| 90 |
+
assert drepr(-delta_1d) == "-1 days +00:00:00"
|
| 91 |
+
assert drepr(delta_0d) == "0 days 00:00:00"
|
| 92 |
+
assert drepr(delta_1s) == "0 days 00:00:01"
|
| 93 |
+
assert drepr(delta_500ms) == "0 days 00:00:00.500000"
|
| 94 |
+
assert drepr(delta_1d + delta_1s) == "1 days 00:00:01"
|
| 95 |
+
assert drepr(-delta_1d + delta_1s) == "-1 days +00:00:01"
|
| 96 |
+
assert drepr(delta_1d + delta_500ms) == "1 days 00:00:00.500000"
|
| 97 |
+
assert drepr(-delta_1d + delta_500ms) == "-1 days +00:00:00.500000"
|
| 98 |
+
|
| 99 |
+
def test_all(self):
|
| 100 |
+
delta_1d = Timedelta(1, unit="D")
|
| 101 |
+
delta_0d = Timedelta(0, unit="D")
|
| 102 |
+
delta_1ns = Timedelta(1, unit="ns")
|
| 103 |
+
|
| 104 |
+
drepr = lambda x: x._repr_base(format="all")
|
| 105 |
+
assert drepr(delta_1d) == "1 days 00:00:00.000000000"
|
| 106 |
+
assert drepr(-delta_1d) == "-1 days +00:00:00.000000000"
|
| 107 |
+
assert drepr(delta_0d) == "0 days 00:00:00.000000000"
|
| 108 |
+
assert drepr(delta_1ns) == "0 days 00:00:00.000000001"
|
| 109 |
+
assert drepr(-delta_1d + delta_1ns) == "-1 days +00:00:00.000000001"
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/__pycache__/test_arithmetic.cpython-310.pyc
ADDED
|
Binary file (10.4 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (190 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_as_unit.cpython-310.pyc
ADDED
|
Binary file (2.7 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_replace.cpython-310.pyc
ADDED
|
Binary file (6.62 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_timestamp_method.cpython-310.pyc
ADDED
|
Binary file (1.09 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_to_julian_date.cpython-310.pyc
ADDED
|
Binary file (1.34 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_to_pydatetime.cpython-310.pyc
ADDED
|
Binary file (3.17 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_tz_convert.cpython-310.pyc
ADDED
|
Binary file (1.83 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/__pycache__/test_tz_localize.cpython-310.pyc
ADDED
|
Binary file (9.69 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/scalar/timestamp/methods/test_timestamp_method.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# NB: This is for the Timestamp.timestamp *method* specifically, not
|
| 2 |
+
# the Timestamp class in general.
|
| 3 |
+
|
| 4 |
+
from pytz import utc
|
| 5 |
+
|
| 6 |
+
from pandas._libs.tslibs import Timestamp
|
| 7 |
+
import pandas.util._test_decorators as td
|
| 8 |
+
|
| 9 |
+
import pandas._testing as tm
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestTimestampMethod:
|
| 13 |
+
@td.skip_if_windows
|
| 14 |
+
def test_timestamp(self, fixed_now_ts):
|
| 15 |
+
# GH#17329
|
| 16 |
+
# tz-naive --> treat it as if it were UTC for purposes of timestamp()
|
| 17 |
+
ts = fixed_now_ts
|
| 18 |
+
uts = ts.replace(tzinfo=utc)
|
| 19 |
+
assert ts.timestamp() == uts.timestamp()
|
| 20 |
+
|
| 21 |
+
tsc = Timestamp("2014-10-11 11:00:01.12345678", tz="US/Central")
|
| 22 |
+
utsc = tsc.tz_convert("UTC")
|
| 23 |
+
|
| 24 |
+
# utsc is a different representation of the same time
|
| 25 |
+
assert tsc.timestamp() == utsc.timestamp()
|
| 26 |
+
|
| 27 |
+
# datetime.timestamp() converts in the local timezone
|
| 28 |
+
with tm.set_timezone("UTC"):
|
| 29 |
+
# should agree with datetime.timestamp method
|
| 30 |
+
dt = ts.to_pydatetime()
|
| 31 |
+
assert dt.timestamp() == ts.timestamp()
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (185 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_freq_code.cpython-310.pyc
ADDED
|
Binary file (1.9 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_frequencies.cpython-310.pyc
ADDED
|
Binary file (803 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_inference.cpython-310.pyc
ADDED
|
Binary file (13.8 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_freq_code.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
from pandas._libs.tslibs import (
|
| 5 |
+
Period,
|
| 6 |
+
to_offset,
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@pytest.mark.parametrize(
|
| 11 |
+
"freqstr,exp_freqstr",
|
| 12 |
+
[("D", "D"), ("W", "D"), ("ME", "D"), ("s", "s"), ("min", "s"), ("h", "s")],
|
| 13 |
+
)
|
| 14 |
+
def test_get_to_timestamp_base(freqstr, exp_freqstr):
|
| 15 |
+
off = to_offset(freqstr)
|
| 16 |
+
per = Period._from_ordinal(1, off)
|
| 17 |
+
exp_code = to_offset(exp_freqstr)._period_dtype_code
|
| 18 |
+
|
| 19 |
+
result_code = per._dtype._get_to_timestamp_base()
|
| 20 |
+
assert result_code == exp_code
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@pytest.mark.parametrize(
|
| 24 |
+
"args,expected",
|
| 25 |
+
[
|
| 26 |
+
((1.5, "min"), (90, "s")),
|
| 27 |
+
((62.4, "min"), (3744, "s")),
|
| 28 |
+
((1.04, "h"), (3744, "s")),
|
| 29 |
+
((1, "D"), (1, "D")),
|
| 30 |
+
((0.342931, "h"), (1234551600, "us")),
|
| 31 |
+
((1.2345, "D"), (106660800, "ms")),
|
| 32 |
+
],
|
| 33 |
+
)
|
| 34 |
+
def test_resolution_bumping(args, expected):
|
| 35 |
+
# see gh-14378
|
| 36 |
+
off = to_offset(str(args[0]) + args[1])
|
| 37 |
+
assert off.n == expected[0]
|
| 38 |
+
assert off._prefix == expected[1]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@pytest.mark.parametrize(
|
| 42 |
+
"args",
|
| 43 |
+
[
|
| 44 |
+
(0.5, "ns"),
|
| 45 |
+
# Too much precision in the input can prevent.
|
| 46 |
+
(0.3429324798798269273987982, "h"),
|
| 47 |
+
],
|
| 48 |
+
)
|
| 49 |
+
def test_cat(args):
|
| 50 |
+
msg = "Invalid frequency"
|
| 51 |
+
|
| 52 |
+
with pytest.raises(ValueError, match=msg):
|
| 53 |
+
to_offset(str(args[0]) + args[1])
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@pytest.mark.parametrize(
|
| 57 |
+
"freqstr,expected",
|
| 58 |
+
[
|
| 59 |
+
("1h", "2021-01-01T09:00:00"),
|
| 60 |
+
("1D", "2021-01-02T08:00:00"),
|
| 61 |
+
("1W", "2021-01-03T08:00:00"),
|
| 62 |
+
("1ME", "2021-01-31T08:00:00"),
|
| 63 |
+
("1YE", "2021-12-31T08:00:00"),
|
| 64 |
+
],
|
| 65 |
+
)
|
| 66 |
+
def test_compatibility(freqstr, expected):
|
| 67 |
+
ts_np = np.datetime64("2021-01-01T08:00:00.00")
|
| 68 |
+
do = to_offset(freqstr)
|
| 69 |
+
assert ts_np + do == np.datetime64(expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_frequencies.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from pandas._libs.tslibs import offsets
|
| 4 |
+
|
| 5 |
+
from pandas.tseries.frequencies import (
|
| 6 |
+
is_subperiod,
|
| 7 |
+
is_superperiod,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@pytest.mark.parametrize(
|
| 12 |
+
"p1,p2,expected",
|
| 13 |
+
[
|
| 14 |
+
# Input validation.
|
| 15 |
+
(offsets.MonthEnd(), None, False),
|
| 16 |
+
(offsets.YearEnd(), None, False),
|
| 17 |
+
(None, offsets.YearEnd(), False),
|
| 18 |
+
(None, offsets.MonthEnd(), False),
|
| 19 |
+
(None, None, False),
|
| 20 |
+
(offsets.YearEnd(), offsets.MonthEnd(), True),
|
| 21 |
+
(offsets.Hour(), offsets.Minute(), True),
|
| 22 |
+
(offsets.Second(), offsets.Milli(), True),
|
| 23 |
+
(offsets.Milli(), offsets.Micro(), True),
|
| 24 |
+
(offsets.Micro(), offsets.Nano(), True),
|
| 25 |
+
],
|
| 26 |
+
)
|
| 27 |
+
def test_super_sub_symmetry(p1, p2, expected):
|
| 28 |
+
assert is_superperiod(p1, p2) is expected
|
| 29 |
+
assert is_subperiod(p2, p1) is expected
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_inference.py
ADDED
|
@@ -0,0 +1,558 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import (
|
| 2 |
+
datetime,
|
| 3 |
+
timedelta,
|
| 4 |
+
)
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from pandas._libs.tslibs.ccalendar import (
|
| 10 |
+
DAYS,
|
| 11 |
+
MONTHS,
|
| 12 |
+
)
|
| 13 |
+
from pandas._libs.tslibs.offsets import _get_offset
|
| 14 |
+
from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
|
| 15 |
+
from pandas.compat import is_platform_windows
|
| 16 |
+
|
| 17 |
+
from pandas import (
|
| 18 |
+
DatetimeIndex,
|
| 19 |
+
Index,
|
| 20 |
+
RangeIndex,
|
| 21 |
+
Series,
|
| 22 |
+
Timestamp,
|
| 23 |
+
date_range,
|
| 24 |
+
period_range,
|
| 25 |
+
)
|
| 26 |
+
import pandas._testing as tm
|
| 27 |
+
from pandas.core.arrays import (
|
| 28 |
+
DatetimeArray,
|
| 29 |
+
TimedeltaArray,
|
| 30 |
+
)
|
| 31 |
+
from pandas.core.tools.datetimes import to_datetime
|
| 32 |
+
|
| 33 |
+
from pandas.tseries import (
|
| 34 |
+
frequencies,
|
| 35 |
+
offsets,
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@pytest.fixture(
|
| 40 |
+
params=[
|
| 41 |
+
(timedelta(1), "D"),
|
| 42 |
+
(timedelta(hours=1), "h"),
|
| 43 |
+
(timedelta(minutes=1), "min"),
|
| 44 |
+
(timedelta(seconds=1), "s"),
|
| 45 |
+
(np.timedelta64(1, "ns"), "ns"),
|
| 46 |
+
(timedelta(microseconds=1), "us"),
|
| 47 |
+
(timedelta(microseconds=1000), "ms"),
|
| 48 |
+
]
|
| 49 |
+
)
|
| 50 |
+
def base_delta_code_pair(request):
|
| 51 |
+
return request.param
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
freqs = (
|
| 55 |
+
[f"QE-{month}" for month in MONTHS]
|
| 56 |
+
+ [f"{annual}-{month}" for annual in ["YE", "BYE"] for month in MONTHS]
|
| 57 |
+
+ ["ME", "BME", "BMS"]
|
| 58 |
+
+ [f"WOM-{count}{day}" for count in range(1, 5) for day in DAYS]
|
| 59 |
+
+ [f"W-{day}" for day in DAYS]
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
@pytest.mark.parametrize("freq", freqs)
|
| 64 |
+
@pytest.mark.parametrize("periods", [5, 7])
|
| 65 |
+
def test_infer_freq_range(periods, freq):
|
| 66 |
+
freq = freq.upper()
|
| 67 |
+
|
| 68 |
+
gen = date_range("1/1/2000", periods=periods, freq=freq)
|
| 69 |
+
index = DatetimeIndex(gen.values)
|
| 70 |
+
|
| 71 |
+
if not freq.startswith("QE-"):
|
| 72 |
+
assert frequencies.infer_freq(index) == gen.freqstr
|
| 73 |
+
else:
|
| 74 |
+
inf_freq = frequencies.infer_freq(index)
|
| 75 |
+
is_dec_range = inf_freq == "QE-DEC" and gen.freqstr in (
|
| 76 |
+
"QE",
|
| 77 |
+
"QE-DEC",
|
| 78 |
+
"QE-SEP",
|
| 79 |
+
"QE-JUN",
|
| 80 |
+
"QE-MAR",
|
| 81 |
+
)
|
| 82 |
+
is_nov_range = inf_freq == "QE-NOV" and gen.freqstr in (
|
| 83 |
+
"QE-NOV",
|
| 84 |
+
"QE-AUG",
|
| 85 |
+
"QE-MAY",
|
| 86 |
+
"QE-FEB",
|
| 87 |
+
)
|
| 88 |
+
is_oct_range = inf_freq == "QE-OCT" and gen.freqstr in (
|
| 89 |
+
"QE-OCT",
|
| 90 |
+
"QE-JUL",
|
| 91 |
+
"QE-APR",
|
| 92 |
+
"QE-JAN",
|
| 93 |
+
)
|
| 94 |
+
assert is_dec_range or is_nov_range or is_oct_range
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def test_raise_if_period_index():
|
| 98 |
+
index = period_range(start="1/1/1990", periods=20, freq="M")
|
| 99 |
+
msg = "Check the `freq` attribute instead of using infer_freq"
|
| 100 |
+
|
| 101 |
+
with pytest.raises(TypeError, match=msg):
|
| 102 |
+
frequencies.infer_freq(index)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def test_raise_if_too_few():
|
| 106 |
+
index = DatetimeIndex(["12/31/1998", "1/3/1999"])
|
| 107 |
+
msg = "Need at least 3 dates to infer frequency"
|
| 108 |
+
|
| 109 |
+
with pytest.raises(ValueError, match=msg):
|
| 110 |
+
frequencies.infer_freq(index)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def test_business_daily():
|
| 114 |
+
index = DatetimeIndex(["01/01/1999", "1/4/1999", "1/5/1999"])
|
| 115 |
+
assert frequencies.infer_freq(index) == "B"
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def test_business_daily_look_alike():
|
| 119 |
+
# see gh-16624
|
| 120 |
+
#
|
| 121 |
+
# Do not infer "B when "weekend" (2-day gap) in wrong place.
|
| 122 |
+
index = DatetimeIndex(["12/31/1998", "1/3/1999", "1/4/1999"])
|
| 123 |
+
assert frequencies.infer_freq(index) is None
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def test_day_corner():
|
| 127 |
+
index = DatetimeIndex(["1/1/2000", "1/2/2000", "1/3/2000"])
|
| 128 |
+
assert frequencies.infer_freq(index) == "D"
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def test_non_datetime_index():
|
| 132 |
+
dates = to_datetime(["1/1/2000", "1/2/2000", "1/3/2000"])
|
| 133 |
+
assert frequencies.infer_freq(dates) == "D"
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def test_fifth_week_of_month_infer():
|
| 137 |
+
# see gh-9425
|
| 138 |
+
#
|
| 139 |
+
# Only attempt to infer up to WOM-4.
|
| 140 |
+
index = DatetimeIndex(["2014-03-31", "2014-06-30", "2015-03-30"])
|
| 141 |
+
assert frequencies.infer_freq(index) is None
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def test_week_of_month_fake():
|
| 145 |
+
# All of these dates are on same day
|
| 146 |
+
# of week and are 4 or 5 weeks apart.
|
| 147 |
+
index = DatetimeIndex(["2013-08-27", "2013-10-01", "2013-10-29", "2013-11-26"])
|
| 148 |
+
assert frequencies.infer_freq(index) != "WOM-4TUE"
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def test_fifth_week_of_month():
|
| 152 |
+
# see gh-9425
|
| 153 |
+
#
|
| 154 |
+
# Only supports freq up to WOM-4.
|
| 155 |
+
msg = (
|
| 156 |
+
"Of the four parameters: start, end, periods, "
|
| 157 |
+
"and freq, exactly three must be specified"
|
| 158 |
+
)
|
| 159 |
+
|
| 160 |
+
with pytest.raises(ValueError, match=msg):
|
| 161 |
+
date_range("2014-01-01", freq="WOM-5MON")
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def test_monthly_ambiguous():
|
| 165 |
+
rng = DatetimeIndex(["1/31/2000", "2/29/2000", "3/31/2000"])
|
| 166 |
+
assert rng.inferred_freq == "ME"
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
def test_annual_ambiguous():
|
| 170 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
| 171 |
+
assert rng.inferred_freq == "YE-JAN"
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
@pytest.mark.parametrize("count", range(1, 5))
|
| 175 |
+
def test_infer_freq_delta(base_delta_code_pair, count):
|
| 176 |
+
b = Timestamp(datetime.now())
|
| 177 |
+
base_delta, code = base_delta_code_pair
|
| 178 |
+
|
| 179 |
+
inc = base_delta * count
|
| 180 |
+
index = DatetimeIndex([b + inc * j for j in range(3)])
|
| 181 |
+
|
| 182 |
+
exp_freq = f"{count:d}{code}" if count > 1 else code
|
| 183 |
+
assert frequencies.infer_freq(index) == exp_freq
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@pytest.mark.parametrize(
|
| 187 |
+
"constructor",
|
| 188 |
+
[
|
| 189 |
+
lambda now, delta: DatetimeIndex(
|
| 190 |
+
[now + delta * 7] + [now + delta * j for j in range(3)]
|
| 191 |
+
),
|
| 192 |
+
lambda now, delta: DatetimeIndex(
|
| 193 |
+
[now + delta * j for j in range(3)] + [now + delta * 7]
|
| 194 |
+
),
|
| 195 |
+
],
|
| 196 |
+
)
|
| 197 |
+
def test_infer_freq_custom(base_delta_code_pair, constructor):
|
| 198 |
+
b = Timestamp(datetime.now())
|
| 199 |
+
base_delta, _ = base_delta_code_pair
|
| 200 |
+
|
| 201 |
+
index = constructor(b, base_delta)
|
| 202 |
+
assert frequencies.infer_freq(index) is None
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
@pytest.mark.parametrize(
|
| 206 |
+
"freq,expected", [("Q", "QE-DEC"), ("Q-NOV", "QE-NOV"), ("Q-OCT", "QE-OCT")]
|
| 207 |
+
)
|
| 208 |
+
def test_infer_freq_index(freq, expected):
|
| 209 |
+
rng = period_range("1959Q2", "2009Q3", freq=freq)
|
| 210 |
+
with tm.assert_produces_warning(FutureWarning, match="Dtype inference"):
|
| 211 |
+
rng = Index(rng.to_timestamp("D", how="e").astype(object))
|
| 212 |
+
|
| 213 |
+
assert rng.inferred_freq == expected
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
@pytest.mark.parametrize(
|
| 217 |
+
"expected,dates",
|
| 218 |
+
list(
|
| 219 |
+
{
|
| 220 |
+
"YS-JAN": ["2009-01-01", "2010-01-01", "2011-01-01", "2012-01-01"],
|
| 221 |
+
"QE-OCT": ["2009-01-31", "2009-04-30", "2009-07-31", "2009-10-31"],
|
| 222 |
+
"ME": ["2010-11-30", "2010-12-31", "2011-01-31", "2011-02-28"],
|
| 223 |
+
"W-SAT": ["2010-12-25", "2011-01-01", "2011-01-08", "2011-01-15"],
|
| 224 |
+
"D": ["2011-01-01", "2011-01-02", "2011-01-03", "2011-01-04"],
|
| 225 |
+
"h": [
|
| 226 |
+
"2011-12-31 22:00",
|
| 227 |
+
"2011-12-31 23:00",
|
| 228 |
+
"2012-01-01 00:00",
|
| 229 |
+
"2012-01-01 01:00",
|
| 230 |
+
],
|
| 231 |
+
}.items()
|
| 232 |
+
),
|
| 233 |
+
)
|
| 234 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
| 235 |
+
def test_infer_freq_tz(tz_naive_fixture, expected, dates, unit):
|
| 236 |
+
# see gh-7310, GH#55609
|
| 237 |
+
tz = tz_naive_fixture
|
| 238 |
+
idx = DatetimeIndex(dates, tz=tz).as_unit(unit)
|
| 239 |
+
assert idx.inferred_freq == expected
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def test_infer_freq_tz_series(tz_naive_fixture):
|
| 243 |
+
# infer_freq should work with both tz-naive and tz-aware series. See gh-52456
|
| 244 |
+
tz = tz_naive_fixture
|
| 245 |
+
idx = date_range("2021-01-01", "2021-01-04", tz=tz)
|
| 246 |
+
series = idx.to_series().reset_index(drop=True)
|
| 247 |
+
inferred_freq = frequencies.infer_freq(series)
|
| 248 |
+
assert inferred_freq == "D"
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
@pytest.mark.parametrize(
|
| 252 |
+
"date_pair",
|
| 253 |
+
[
|
| 254 |
+
["2013-11-02", "2013-11-5"], # Fall DST
|
| 255 |
+
["2014-03-08", "2014-03-11"], # Spring DST
|
| 256 |
+
["2014-01-01", "2014-01-03"], # Regular Time
|
| 257 |
+
],
|
| 258 |
+
)
|
| 259 |
+
@pytest.mark.parametrize(
|
| 260 |
+
"freq",
|
| 261 |
+
["h", "3h", "10min", "3601s", "3600001ms", "3600000001us", "3600000000001ns"],
|
| 262 |
+
)
|
| 263 |
+
def test_infer_freq_tz_transition(tz_naive_fixture, date_pair, freq):
|
| 264 |
+
# see gh-8772
|
| 265 |
+
tz = tz_naive_fixture
|
| 266 |
+
idx = date_range(date_pair[0], date_pair[1], freq=freq, tz=tz)
|
| 267 |
+
assert idx.inferred_freq == freq
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def test_infer_freq_tz_transition_custom():
|
| 271 |
+
index = date_range("2013-11-03", periods=5, freq="3h").tz_localize(
|
| 272 |
+
"America/Chicago"
|
| 273 |
+
)
|
| 274 |
+
assert index.inferred_freq is None
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
@pytest.mark.parametrize(
|
| 278 |
+
"data,expected",
|
| 279 |
+
[
|
| 280 |
+
# Hourly freq in a day must result in "h"
|
| 281 |
+
(
|
| 282 |
+
[
|
| 283 |
+
"2014-07-01 09:00",
|
| 284 |
+
"2014-07-01 10:00",
|
| 285 |
+
"2014-07-01 11:00",
|
| 286 |
+
"2014-07-01 12:00",
|
| 287 |
+
"2014-07-01 13:00",
|
| 288 |
+
"2014-07-01 14:00",
|
| 289 |
+
],
|
| 290 |
+
"h",
|
| 291 |
+
),
|
| 292 |
+
(
|
| 293 |
+
[
|
| 294 |
+
"2014-07-01 09:00",
|
| 295 |
+
"2014-07-01 10:00",
|
| 296 |
+
"2014-07-01 11:00",
|
| 297 |
+
"2014-07-01 12:00",
|
| 298 |
+
"2014-07-01 13:00",
|
| 299 |
+
"2014-07-01 14:00",
|
| 300 |
+
"2014-07-01 15:00",
|
| 301 |
+
"2014-07-01 16:00",
|
| 302 |
+
"2014-07-02 09:00",
|
| 303 |
+
"2014-07-02 10:00",
|
| 304 |
+
"2014-07-02 11:00",
|
| 305 |
+
],
|
| 306 |
+
"bh",
|
| 307 |
+
),
|
| 308 |
+
(
|
| 309 |
+
[
|
| 310 |
+
"2014-07-04 09:00",
|
| 311 |
+
"2014-07-04 10:00",
|
| 312 |
+
"2014-07-04 11:00",
|
| 313 |
+
"2014-07-04 12:00",
|
| 314 |
+
"2014-07-04 13:00",
|
| 315 |
+
"2014-07-04 14:00",
|
| 316 |
+
"2014-07-04 15:00",
|
| 317 |
+
"2014-07-04 16:00",
|
| 318 |
+
"2014-07-07 09:00",
|
| 319 |
+
"2014-07-07 10:00",
|
| 320 |
+
"2014-07-07 11:00",
|
| 321 |
+
],
|
| 322 |
+
"bh",
|
| 323 |
+
),
|
| 324 |
+
(
|
| 325 |
+
[
|
| 326 |
+
"2014-07-04 09:00",
|
| 327 |
+
"2014-07-04 10:00",
|
| 328 |
+
"2014-07-04 11:00",
|
| 329 |
+
"2014-07-04 12:00",
|
| 330 |
+
"2014-07-04 13:00",
|
| 331 |
+
"2014-07-04 14:00",
|
| 332 |
+
"2014-07-04 15:00",
|
| 333 |
+
"2014-07-04 16:00",
|
| 334 |
+
"2014-07-07 09:00",
|
| 335 |
+
"2014-07-07 10:00",
|
| 336 |
+
"2014-07-07 11:00",
|
| 337 |
+
"2014-07-07 12:00",
|
| 338 |
+
"2014-07-07 13:00",
|
| 339 |
+
"2014-07-07 14:00",
|
| 340 |
+
"2014-07-07 15:00",
|
| 341 |
+
"2014-07-07 16:00",
|
| 342 |
+
"2014-07-08 09:00",
|
| 343 |
+
"2014-07-08 10:00",
|
| 344 |
+
"2014-07-08 11:00",
|
| 345 |
+
"2014-07-08 12:00",
|
| 346 |
+
"2014-07-08 13:00",
|
| 347 |
+
"2014-07-08 14:00",
|
| 348 |
+
"2014-07-08 15:00",
|
| 349 |
+
"2014-07-08 16:00",
|
| 350 |
+
],
|
| 351 |
+
"bh",
|
| 352 |
+
),
|
| 353 |
+
],
|
| 354 |
+
)
|
| 355 |
+
def test_infer_freq_business_hour(data, expected):
|
| 356 |
+
# see gh-7905
|
| 357 |
+
idx = DatetimeIndex(data)
|
| 358 |
+
assert idx.inferred_freq == expected
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
def test_not_monotonic():
|
| 362 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
| 363 |
+
rng = rng[::-1]
|
| 364 |
+
|
| 365 |
+
assert rng.inferred_freq == "-1YE-JAN"
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def test_non_datetime_index2():
|
| 369 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
| 370 |
+
vals = rng.to_pydatetime()
|
| 371 |
+
|
| 372 |
+
result = frequencies.infer_freq(vals)
|
| 373 |
+
assert result == rng.inferred_freq
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
@pytest.mark.parametrize(
|
| 377 |
+
"idx",
|
| 378 |
+
[
|
| 379 |
+
Index(np.arange(5), dtype=np.int64),
|
| 380 |
+
Index(np.arange(5), dtype=np.float64),
|
| 381 |
+
period_range("2020-01-01", periods=5),
|
| 382 |
+
RangeIndex(5),
|
| 383 |
+
],
|
| 384 |
+
)
|
| 385 |
+
def test_invalid_index_types(idx):
|
| 386 |
+
# see gh-48439
|
| 387 |
+
msg = "|".join(
|
| 388 |
+
[
|
| 389 |
+
"cannot infer freq from a non-convertible",
|
| 390 |
+
"Check the `freq` attribute instead of using infer_freq",
|
| 391 |
+
]
|
| 392 |
+
)
|
| 393 |
+
|
| 394 |
+
with pytest.raises(TypeError, match=msg):
|
| 395 |
+
frequencies.infer_freq(idx)
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
@pytest.mark.skipif(is_platform_windows(), reason="see gh-10822: Windows issue")
|
| 399 |
+
def test_invalid_index_types_unicode():
|
| 400 |
+
# see gh-10822
|
| 401 |
+
#
|
| 402 |
+
# Odd error message on conversions to datetime for unicode.
|
| 403 |
+
msg = "Unknown datetime string format"
|
| 404 |
+
|
| 405 |
+
with pytest.raises(ValueError, match=msg):
|
| 406 |
+
frequencies.infer_freq(Index(["ZqgszYBfuL"]))
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
def test_string_datetime_like_compat():
|
| 410 |
+
# see gh-6463
|
| 411 |
+
data = ["2004-01", "2004-02", "2004-03", "2004-04"]
|
| 412 |
+
|
| 413 |
+
expected = frequencies.infer_freq(data)
|
| 414 |
+
result = frequencies.infer_freq(Index(data))
|
| 415 |
+
|
| 416 |
+
assert result == expected
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
def test_series():
|
| 420 |
+
# see gh-6407
|
| 421 |
+
s = Series(date_range("20130101", "20130110"))
|
| 422 |
+
inferred = frequencies.infer_freq(s)
|
| 423 |
+
assert inferred == "D"
|
| 424 |
+
|
| 425 |
+
|
| 426 |
+
@pytest.mark.parametrize("end", [10, 10.0])
|
| 427 |
+
def test_series_invalid_type(end):
|
| 428 |
+
# see gh-6407
|
| 429 |
+
msg = "cannot infer freq from a non-convertible dtype on a Series"
|
| 430 |
+
s = Series(np.arange(end))
|
| 431 |
+
|
| 432 |
+
with pytest.raises(TypeError, match=msg):
|
| 433 |
+
frequencies.infer_freq(s)
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
def test_series_inconvertible_string(using_infer_string):
|
| 437 |
+
# see gh-6407
|
| 438 |
+
if using_infer_string:
|
| 439 |
+
msg = "cannot infer freq from"
|
| 440 |
+
|
| 441 |
+
with pytest.raises(TypeError, match=msg):
|
| 442 |
+
frequencies.infer_freq(Series(["foo", "bar"]))
|
| 443 |
+
else:
|
| 444 |
+
msg = "Unknown datetime string format"
|
| 445 |
+
|
| 446 |
+
with pytest.raises(ValueError, match=msg):
|
| 447 |
+
frequencies.infer_freq(Series(["foo", "bar"]))
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
@pytest.mark.parametrize("freq", [None, "ms"])
|
| 451 |
+
def test_series_period_index(freq):
|
| 452 |
+
# see gh-6407
|
| 453 |
+
#
|
| 454 |
+
# Cannot infer on PeriodIndex
|
| 455 |
+
msg = "cannot infer freq from a non-convertible dtype on a Series"
|
| 456 |
+
s = Series(period_range("2013", periods=10, freq=freq))
|
| 457 |
+
|
| 458 |
+
with pytest.raises(TypeError, match=msg):
|
| 459 |
+
frequencies.infer_freq(s)
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
@pytest.mark.parametrize("freq", ["ME", "ms", "s"])
|
| 463 |
+
def test_series_datetime_index(freq):
|
| 464 |
+
s = Series(date_range("20130101", periods=10, freq=freq))
|
| 465 |
+
inferred = frequencies.infer_freq(s)
|
| 466 |
+
assert inferred == freq
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
@pytest.mark.parametrize(
|
| 470 |
+
"offset_func",
|
| 471 |
+
[
|
| 472 |
+
_get_offset,
|
| 473 |
+
lambda freq: date_range("2011-01-01", periods=5, freq=freq),
|
| 474 |
+
],
|
| 475 |
+
)
|
| 476 |
+
@pytest.mark.parametrize(
|
| 477 |
+
"freq",
|
| 478 |
+
[
|
| 479 |
+
"WEEKDAY",
|
| 480 |
+
"EOM",
|
| 481 |
+
"W@MON",
|
| 482 |
+
"W@TUE",
|
| 483 |
+
"W@WED",
|
| 484 |
+
"W@THU",
|
| 485 |
+
"W@FRI",
|
| 486 |
+
"W@SAT",
|
| 487 |
+
"W@SUN",
|
| 488 |
+
"QE@JAN",
|
| 489 |
+
"QE@FEB",
|
| 490 |
+
"QE@MAR",
|
| 491 |
+
"YE@JAN",
|
| 492 |
+
"YE@FEB",
|
| 493 |
+
"YE@MAR",
|
| 494 |
+
"YE@APR",
|
| 495 |
+
"YE@MAY",
|
| 496 |
+
"YE@JUN",
|
| 497 |
+
"YE@JUL",
|
| 498 |
+
"YE@AUG",
|
| 499 |
+
"YE@SEP",
|
| 500 |
+
"YE@OCT",
|
| 501 |
+
"YE@NOV",
|
| 502 |
+
"YE@DEC",
|
| 503 |
+
"YE@JAN",
|
| 504 |
+
"WOM@1MON",
|
| 505 |
+
"WOM@2MON",
|
| 506 |
+
"WOM@3MON",
|
| 507 |
+
"WOM@4MON",
|
| 508 |
+
"WOM@1TUE",
|
| 509 |
+
"WOM@2TUE",
|
| 510 |
+
"WOM@3TUE",
|
| 511 |
+
"WOM@4TUE",
|
| 512 |
+
"WOM@1WED",
|
| 513 |
+
"WOM@2WED",
|
| 514 |
+
"WOM@3WED",
|
| 515 |
+
"WOM@4WED",
|
| 516 |
+
"WOM@1THU",
|
| 517 |
+
"WOM@2THU",
|
| 518 |
+
"WOM@3THU",
|
| 519 |
+
"WOM@4THU",
|
| 520 |
+
"WOM@1FRI",
|
| 521 |
+
"WOM@2FRI",
|
| 522 |
+
"WOM@3FRI",
|
| 523 |
+
"WOM@4FRI",
|
| 524 |
+
],
|
| 525 |
+
)
|
| 526 |
+
def test_legacy_offset_warnings(offset_func, freq):
|
| 527 |
+
with pytest.raises(ValueError, match=INVALID_FREQ_ERR_MSG):
|
| 528 |
+
offset_func(freq)
|
| 529 |
+
|
| 530 |
+
|
| 531 |
+
def test_ms_vs_capital_ms():
|
| 532 |
+
left = _get_offset("ms")
|
| 533 |
+
right = _get_offset("MS")
|
| 534 |
+
|
| 535 |
+
assert left == offsets.Milli()
|
| 536 |
+
assert right == offsets.MonthBegin()
|
| 537 |
+
|
| 538 |
+
|
| 539 |
+
def test_infer_freq_non_nano():
|
| 540 |
+
arr = np.arange(10).astype(np.int64).view("M8[s]")
|
| 541 |
+
dta = DatetimeArray._simple_new(arr, dtype=arr.dtype)
|
| 542 |
+
res = frequencies.infer_freq(dta)
|
| 543 |
+
assert res == "s"
|
| 544 |
+
|
| 545 |
+
arr2 = arr.view("m8[ms]")
|
| 546 |
+
tda = TimedeltaArray._simple_new(arr2, dtype=arr2.dtype)
|
| 547 |
+
res2 = frequencies.infer_freq(tda)
|
| 548 |
+
assert res2 == "ms"
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
def test_infer_freq_non_nano_tzaware(tz_aware_fixture):
|
| 552 |
+
tz = tz_aware_fixture
|
| 553 |
+
|
| 554 |
+
dti = date_range("2016-01-01", periods=365, freq="B", tz=tz)
|
| 555 |
+
dta = dti._data.as_unit("s")
|
| 556 |
+
|
| 557 |
+
res = frequencies.infer_freq(dta)
|
| 558 |
+
assert res == "B"
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (181 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_calendar.cpython-310.pyc
ADDED
|
Binary file (3.75 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_federal.cpython-310.pyc
ADDED
|
Binary file (1.96 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_holiday.cpython-310.pyc
ADDED
|
Binary file (7.78 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_observance.cpython-310.pyc
ADDED
|
Binary file (2.47 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_calendar.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
from pandas import (
|
| 6 |
+
DatetimeIndex,
|
| 7 |
+
offsets,
|
| 8 |
+
to_datetime,
|
| 9 |
+
)
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
|
| 12 |
+
from pandas.tseries.holiday import (
|
| 13 |
+
AbstractHolidayCalendar,
|
| 14 |
+
Holiday,
|
| 15 |
+
Timestamp,
|
| 16 |
+
USFederalHolidayCalendar,
|
| 17 |
+
USLaborDay,
|
| 18 |
+
USThanksgivingDay,
|
| 19 |
+
get_calendar,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@pytest.mark.parametrize(
|
| 24 |
+
"transform", [lambda x: x, lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
|
| 25 |
+
)
|
| 26 |
+
def test_calendar(transform):
|
| 27 |
+
start_date = datetime(2012, 1, 1)
|
| 28 |
+
end_date = datetime(2012, 12, 31)
|
| 29 |
+
|
| 30 |
+
calendar = USFederalHolidayCalendar()
|
| 31 |
+
holidays = calendar.holidays(transform(start_date), transform(end_date))
|
| 32 |
+
|
| 33 |
+
expected = [
|
| 34 |
+
datetime(2012, 1, 2),
|
| 35 |
+
datetime(2012, 1, 16),
|
| 36 |
+
datetime(2012, 2, 20),
|
| 37 |
+
datetime(2012, 5, 28),
|
| 38 |
+
datetime(2012, 7, 4),
|
| 39 |
+
datetime(2012, 9, 3),
|
| 40 |
+
datetime(2012, 10, 8),
|
| 41 |
+
datetime(2012, 11, 12),
|
| 42 |
+
datetime(2012, 11, 22),
|
| 43 |
+
datetime(2012, 12, 25),
|
| 44 |
+
]
|
| 45 |
+
|
| 46 |
+
assert list(holidays.to_pydatetime()) == expected
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def test_calendar_caching():
|
| 50 |
+
# see gh-9552.
|
| 51 |
+
|
| 52 |
+
class TestCalendar(AbstractHolidayCalendar):
|
| 53 |
+
def __init__(self, name=None, rules=None) -> None:
|
| 54 |
+
super().__init__(name=name, rules=rules)
|
| 55 |
+
|
| 56 |
+
jan1 = TestCalendar(rules=[Holiday("jan1", year=2015, month=1, day=1)])
|
| 57 |
+
jan2 = TestCalendar(rules=[Holiday("jan2", year=2015, month=1, day=2)])
|
| 58 |
+
|
| 59 |
+
# Getting holidays for Jan 1 should not alter results for Jan 2.
|
| 60 |
+
expected = DatetimeIndex(["01-Jan-2015"]).as_unit("ns")
|
| 61 |
+
tm.assert_index_equal(jan1.holidays(), expected)
|
| 62 |
+
|
| 63 |
+
expected2 = DatetimeIndex(["02-Jan-2015"]).as_unit("ns")
|
| 64 |
+
tm.assert_index_equal(jan2.holidays(), expected2)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def test_calendar_observance_dates():
|
| 68 |
+
# see gh-11477
|
| 69 |
+
us_fed_cal = get_calendar("USFederalHolidayCalendar")
|
| 70 |
+
holidays0 = us_fed_cal.holidays(
|
| 71 |
+
datetime(2015, 7, 3), datetime(2015, 7, 3)
|
| 72 |
+
) # <-- same start and end dates
|
| 73 |
+
holidays1 = us_fed_cal.holidays(
|
| 74 |
+
datetime(2015, 7, 3), datetime(2015, 7, 6)
|
| 75 |
+
) # <-- different start and end dates
|
| 76 |
+
holidays2 = us_fed_cal.holidays(
|
| 77 |
+
datetime(2015, 7, 3), datetime(2015, 7, 3)
|
| 78 |
+
) # <-- same start and end dates
|
| 79 |
+
|
| 80 |
+
# These should all produce the same result.
|
| 81 |
+
#
|
| 82 |
+
# In addition, calling with different start and end
|
| 83 |
+
# dates should not alter the output if we call the
|
| 84 |
+
# function again with the same start and end date.
|
| 85 |
+
tm.assert_index_equal(holidays0, holidays1)
|
| 86 |
+
tm.assert_index_equal(holidays0, holidays2)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def test_rule_from_name():
|
| 90 |
+
us_fed_cal = get_calendar("USFederalHolidayCalendar")
|
| 91 |
+
assert us_fed_cal.rule_from_name("Thanksgiving Day") == USThanksgivingDay
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def test_calendar_2031():
|
| 95 |
+
# See gh-27790
|
| 96 |
+
#
|
| 97 |
+
# Labor Day 2031 is on September 1. Saturday before is August 30.
|
| 98 |
+
# Next working day after August 30 ought to be Tuesday, September 2.
|
| 99 |
+
|
| 100 |
+
class testCalendar(AbstractHolidayCalendar):
|
| 101 |
+
rules = [USLaborDay]
|
| 102 |
+
|
| 103 |
+
cal = testCalendar()
|
| 104 |
+
workDay = offsets.CustomBusinessDay(calendar=cal)
|
| 105 |
+
Sat_before_Labor_Day_2031 = to_datetime("2031-08-30")
|
| 106 |
+
next_working_day = Sat_before_Labor_Day_2031 + 0 * workDay
|
| 107 |
+
assert next_working_day == to_datetime("2031-09-02")
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def test_no_holidays_calendar():
|
| 111 |
+
# Test for issue #31415
|
| 112 |
+
|
| 113 |
+
class NoHolidaysCalendar(AbstractHolidayCalendar):
|
| 114 |
+
pass
|
| 115 |
+
|
| 116 |
+
cal = NoHolidaysCalendar()
|
| 117 |
+
holidays = cal.holidays(Timestamp("01-Jan-2020"), Timestamp("01-Jan-2021"))
|
| 118 |
+
empty_index = DatetimeIndex([]) # Type is DatetimeIndex since return_name=False
|
| 119 |
+
tm.assert_index_equal(holidays, empty_index)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_federal.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
|
| 3 |
+
from pandas import DatetimeIndex
|
| 4 |
+
import pandas._testing as tm
|
| 5 |
+
|
| 6 |
+
from pandas.tseries.holiday import (
|
| 7 |
+
AbstractHolidayCalendar,
|
| 8 |
+
USFederalHolidayCalendar,
|
| 9 |
+
USMartinLutherKingJr,
|
| 10 |
+
USMemorialDay,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def test_no_mlk_before_1986():
|
| 15 |
+
# see gh-10278
|
| 16 |
+
class MLKCalendar(AbstractHolidayCalendar):
|
| 17 |
+
rules = [USMartinLutherKingJr]
|
| 18 |
+
|
| 19 |
+
holidays = MLKCalendar().holidays(start="1984", end="1988").to_pydatetime().tolist()
|
| 20 |
+
|
| 21 |
+
# Testing to make sure holiday is not incorrectly observed before 1986.
|
| 22 |
+
assert holidays == [datetime(1986, 1, 20, 0, 0), datetime(1987, 1, 19, 0, 0)]
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def test_memorial_day():
|
| 26 |
+
class MemorialDay(AbstractHolidayCalendar):
|
| 27 |
+
rules = [USMemorialDay]
|
| 28 |
+
|
| 29 |
+
holidays = MemorialDay().holidays(start="1971", end="1980").to_pydatetime().tolist()
|
| 30 |
+
|
| 31 |
+
# Fixes 5/31 error and checked manually against Wikipedia.
|
| 32 |
+
assert holidays == [
|
| 33 |
+
datetime(1971, 5, 31, 0, 0),
|
| 34 |
+
datetime(1972, 5, 29, 0, 0),
|
| 35 |
+
datetime(1973, 5, 28, 0, 0),
|
| 36 |
+
datetime(1974, 5, 27, 0, 0),
|
| 37 |
+
datetime(1975, 5, 26, 0, 0),
|
| 38 |
+
datetime(1976, 5, 31, 0, 0),
|
| 39 |
+
datetime(1977, 5, 30, 0, 0),
|
| 40 |
+
datetime(1978, 5, 29, 0, 0),
|
| 41 |
+
datetime(1979, 5, 28, 0, 0),
|
| 42 |
+
]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def test_federal_holiday_inconsistent_returntype():
|
| 46 |
+
# GH 49075 test case
|
| 47 |
+
# Instantiate two calendars to rule out _cache
|
| 48 |
+
cal1 = USFederalHolidayCalendar()
|
| 49 |
+
cal2 = USFederalHolidayCalendar()
|
| 50 |
+
|
| 51 |
+
results_2018 = cal1.holidays(start=datetime(2018, 8, 1), end=datetime(2018, 8, 31))
|
| 52 |
+
results_2019 = cal2.holidays(start=datetime(2019, 8, 1), end=datetime(2019, 8, 31))
|
| 53 |
+
expected_results = DatetimeIndex([], dtype="datetime64[ns]", freq=None)
|
| 54 |
+
|
| 55 |
+
# Check against expected results to ensure both date
|
| 56 |
+
# ranges generate expected results as per GH49075 submission
|
| 57 |
+
tm.assert_index_equal(results_2018, expected_results)
|
| 58 |
+
tm.assert_index_equal(results_2019, expected_results)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_holiday.py
ADDED
|
@@ -0,0 +1,332 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
from pytz import utc
|
| 5 |
+
|
| 6 |
+
from pandas import (
|
| 7 |
+
DatetimeIndex,
|
| 8 |
+
Series,
|
| 9 |
+
)
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
|
| 12 |
+
from pandas.tseries.holiday import (
|
| 13 |
+
MO,
|
| 14 |
+
SA,
|
| 15 |
+
AbstractHolidayCalendar,
|
| 16 |
+
DateOffset,
|
| 17 |
+
EasterMonday,
|
| 18 |
+
GoodFriday,
|
| 19 |
+
Holiday,
|
| 20 |
+
HolidayCalendarFactory,
|
| 21 |
+
Timestamp,
|
| 22 |
+
USColumbusDay,
|
| 23 |
+
USFederalHolidayCalendar,
|
| 24 |
+
USLaborDay,
|
| 25 |
+
USMartinLutherKingJr,
|
| 26 |
+
USMemorialDay,
|
| 27 |
+
USPresidentsDay,
|
| 28 |
+
USThanksgivingDay,
|
| 29 |
+
get_calendar,
|
| 30 |
+
next_monday,
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@pytest.mark.parametrize(
|
| 35 |
+
"holiday,start_date,end_date,expected",
|
| 36 |
+
[
|
| 37 |
+
(
|
| 38 |
+
USMemorialDay,
|
| 39 |
+
datetime(2011, 1, 1),
|
| 40 |
+
datetime(2020, 12, 31),
|
| 41 |
+
[
|
| 42 |
+
datetime(2011, 5, 30),
|
| 43 |
+
datetime(2012, 5, 28),
|
| 44 |
+
datetime(2013, 5, 27),
|
| 45 |
+
datetime(2014, 5, 26),
|
| 46 |
+
datetime(2015, 5, 25),
|
| 47 |
+
datetime(2016, 5, 30),
|
| 48 |
+
datetime(2017, 5, 29),
|
| 49 |
+
datetime(2018, 5, 28),
|
| 50 |
+
datetime(2019, 5, 27),
|
| 51 |
+
datetime(2020, 5, 25),
|
| 52 |
+
],
|
| 53 |
+
),
|
| 54 |
+
(
|
| 55 |
+
Holiday("July 4th Eve", month=7, day=3),
|
| 56 |
+
"2001-01-01",
|
| 57 |
+
"2003-03-03",
|
| 58 |
+
[Timestamp("2001-07-03 00:00:00"), Timestamp("2002-07-03 00:00:00")],
|
| 59 |
+
),
|
| 60 |
+
(
|
| 61 |
+
Holiday("July 4th Eve", month=7, day=3, days_of_week=(0, 1, 2, 3)),
|
| 62 |
+
"2001-01-01",
|
| 63 |
+
"2008-03-03",
|
| 64 |
+
[
|
| 65 |
+
Timestamp("2001-07-03 00:00:00"),
|
| 66 |
+
Timestamp("2002-07-03 00:00:00"),
|
| 67 |
+
Timestamp("2003-07-03 00:00:00"),
|
| 68 |
+
Timestamp("2006-07-03 00:00:00"),
|
| 69 |
+
Timestamp("2007-07-03 00:00:00"),
|
| 70 |
+
],
|
| 71 |
+
),
|
| 72 |
+
(
|
| 73 |
+
EasterMonday,
|
| 74 |
+
datetime(2011, 1, 1),
|
| 75 |
+
datetime(2020, 12, 31),
|
| 76 |
+
[
|
| 77 |
+
Timestamp("2011-04-25 00:00:00"),
|
| 78 |
+
Timestamp("2012-04-09 00:00:00"),
|
| 79 |
+
Timestamp("2013-04-01 00:00:00"),
|
| 80 |
+
Timestamp("2014-04-21 00:00:00"),
|
| 81 |
+
Timestamp("2015-04-06 00:00:00"),
|
| 82 |
+
Timestamp("2016-03-28 00:00:00"),
|
| 83 |
+
Timestamp("2017-04-17 00:00:00"),
|
| 84 |
+
Timestamp("2018-04-02 00:00:00"),
|
| 85 |
+
Timestamp("2019-04-22 00:00:00"),
|
| 86 |
+
Timestamp("2020-04-13 00:00:00"),
|
| 87 |
+
],
|
| 88 |
+
),
|
| 89 |
+
(
|
| 90 |
+
GoodFriday,
|
| 91 |
+
datetime(2011, 1, 1),
|
| 92 |
+
datetime(2020, 12, 31),
|
| 93 |
+
[
|
| 94 |
+
Timestamp("2011-04-22 00:00:00"),
|
| 95 |
+
Timestamp("2012-04-06 00:00:00"),
|
| 96 |
+
Timestamp("2013-03-29 00:00:00"),
|
| 97 |
+
Timestamp("2014-04-18 00:00:00"),
|
| 98 |
+
Timestamp("2015-04-03 00:00:00"),
|
| 99 |
+
Timestamp("2016-03-25 00:00:00"),
|
| 100 |
+
Timestamp("2017-04-14 00:00:00"),
|
| 101 |
+
Timestamp("2018-03-30 00:00:00"),
|
| 102 |
+
Timestamp("2019-04-19 00:00:00"),
|
| 103 |
+
Timestamp("2020-04-10 00:00:00"),
|
| 104 |
+
],
|
| 105 |
+
),
|
| 106 |
+
(
|
| 107 |
+
USThanksgivingDay,
|
| 108 |
+
datetime(2011, 1, 1),
|
| 109 |
+
datetime(2020, 12, 31),
|
| 110 |
+
[
|
| 111 |
+
datetime(2011, 11, 24),
|
| 112 |
+
datetime(2012, 11, 22),
|
| 113 |
+
datetime(2013, 11, 28),
|
| 114 |
+
datetime(2014, 11, 27),
|
| 115 |
+
datetime(2015, 11, 26),
|
| 116 |
+
datetime(2016, 11, 24),
|
| 117 |
+
datetime(2017, 11, 23),
|
| 118 |
+
datetime(2018, 11, 22),
|
| 119 |
+
datetime(2019, 11, 28),
|
| 120 |
+
datetime(2020, 11, 26),
|
| 121 |
+
],
|
| 122 |
+
),
|
| 123 |
+
],
|
| 124 |
+
)
|
| 125 |
+
def test_holiday_dates(holiday, start_date, end_date, expected):
|
| 126 |
+
assert list(holiday.dates(start_date, end_date)) == expected
|
| 127 |
+
|
| 128 |
+
# Verify that timezone info is preserved.
|
| 129 |
+
assert list(
|
| 130 |
+
holiday.dates(
|
| 131 |
+
utc.localize(Timestamp(start_date)), utc.localize(Timestamp(end_date))
|
| 132 |
+
)
|
| 133 |
+
) == [utc.localize(dt) for dt in expected]
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
@pytest.mark.parametrize(
|
| 137 |
+
"holiday,start,expected",
|
| 138 |
+
[
|
| 139 |
+
(USMemorialDay, datetime(2015, 7, 1), []),
|
| 140 |
+
(USMemorialDay, "2015-05-25", [Timestamp("2015-05-25")]),
|
| 141 |
+
(USLaborDay, datetime(2015, 7, 1), []),
|
| 142 |
+
(USLaborDay, "2015-09-07", [Timestamp("2015-09-07")]),
|
| 143 |
+
(USColumbusDay, datetime(2015, 7, 1), []),
|
| 144 |
+
(USColumbusDay, "2015-10-12", [Timestamp("2015-10-12")]),
|
| 145 |
+
(USThanksgivingDay, datetime(2015, 7, 1), []),
|
| 146 |
+
(USThanksgivingDay, "2015-11-26", [Timestamp("2015-11-26")]),
|
| 147 |
+
(USMartinLutherKingJr, datetime(2015, 7, 1), []),
|
| 148 |
+
(USMartinLutherKingJr, "2015-01-19", [Timestamp("2015-01-19")]),
|
| 149 |
+
(USPresidentsDay, datetime(2015, 7, 1), []),
|
| 150 |
+
(USPresidentsDay, "2015-02-16", [Timestamp("2015-02-16")]),
|
| 151 |
+
(GoodFriday, datetime(2015, 7, 1), []),
|
| 152 |
+
(GoodFriday, "2015-04-03", [Timestamp("2015-04-03")]),
|
| 153 |
+
(EasterMonday, "2015-04-06", [Timestamp("2015-04-06")]),
|
| 154 |
+
(EasterMonday, datetime(2015, 7, 1), []),
|
| 155 |
+
(EasterMonday, "2015-04-05", []),
|
| 156 |
+
("New Year's Day", "2015-01-01", [Timestamp("2015-01-01")]),
|
| 157 |
+
("New Year's Day", "2010-12-31", [Timestamp("2010-12-31")]),
|
| 158 |
+
("New Year's Day", datetime(2015, 7, 1), []),
|
| 159 |
+
("New Year's Day", "2011-01-01", []),
|
| 160 |
+
("Independence Day", "2015-07-03", [Timestamp("2015-07-03")]),
|
| 161 |
+
("Independence Day", datetime(2015, 7, 1), []),
|
| 162 |
+
("Independence Day", "2015-07-04", []),
|
| 163 |
+
("Veterans Day", "2012-11-12", [Timestamp("2012-11-12")]),
|
| 164 |
+
("Veterans Day", datetime(2015, 7, 1), []),
|
| 165 |
+
("Veterans Day", "2012-11-11", []),
|
| 166 |
+
("Christmas Day", "2011-12-26", [Timestamp("2011-12-26")]),
|
| 167 |
+
("Christmas Day", datetime(2015, 7, 1), []),
|
| 168 |
+
("Christmas Day", "2011-12-25", []),
|
| 169 |
+
("Juneteenth National Independence Day", "2020-06-19", []),
|
| 170 |
+
(
|
| 171 |
+
"Juneteenth National Independence Day",
|
| 172 |
+
"2021-06-18",
|
| 173 |
+
[Timestamp("2021-06-18")],
|
| 174 |
+
),
|
| 175 |
+
("Juneteenth National Independence Day", "2022-06-19", []),
|
| 176 |
+
(
|
| 177 |
+
"Juneteenth National Independence Day",
|
| 178 |
+
"2022-06-20",
|
| 179 |
+
[Timestamp("2022-06-20")],
|
| 180 |
+
),
|
| 181 |
+
],
|
| 182 |
+
)
|
| 183 |
+
def test_holidays_within_dates(holiday, start, expected):
|
| 184 |
+
# see gh-11477
|
| 185 |
+
#
|
| 186 |
+
# Fix holiday behavior where holiday.dates returned dates outside
|
| 187 |
+
# start/end date, or observed rules could not be applied because the
|
| 188 |
+
# holiday was not in the original date range (e.g., 7/4/2015 -> 7/3/2015).
|
| 189 |
+
if isinstance(holiday, str):
|
| 190 |
+
calendar = get_calendar("USFederalHolidayCalendar")
|
| 191 |
+
holiday = calendar.rule_from_name(holiday)
|
| 192 |
+
|
| 193 |
+
assert list(holiday.dates(start, start)) == expected
|
| 194 |
+
|
| 195 |
+
# Verify that timezone info is preserved.
|
| 196 |
+
assert list(
|
| 197 |
+
holiday.dates(utc.localize(Timestamp(start)), utc.localize(Timestamp(start)))
|
| 198 |
+
) == [utc.localize(dt) for dt in expected]
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
@pytest.mark.parametrize(
|
| 202 |
+
"transform", [lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
|
| 203 |
+
)
|
| 204 |
+
def test_argument_types(transform):
|
| 205 |
+
start_date = datetime(2011, 1, 1)
|
| 206 |
+
end_date = datetime(2020, 12, 31)
|
| 207 |
+
|
| 208 |
+
holidays = USThanksgivingDay.dates(start_date, end_date)
|
| 209 |
+
holidays2 = USThanksgivingDay.dates(transform(start_date), transform(end_date))
|
| 210 |
+
tm.assert_index_equal(holidays, holidays2)
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
@pytest.mark.parametrize(
|
| 214 |
+
"name,kwargs",
|
| 215 |
+
[
|
| 216 |
+
("One-Time", {"year": 2012, "month": 5, "day": 28}),
|
| 217 |
+
(
|
| 218 |
+
"Range",
|
| 219 |
+
{
|
| 220 |
+
"month": 5,
|
| 221 |
+
"day": 28,
|
| 222 |
+
"start_date": datetime(2012, 1, 1),
|
| 223 |
+
"end_date": datetime(2012, 12, 31),
|
| 224 |
+
"offset": DateOffset(weekday=MO(1)),
|
| 225 |
+
},
|
| 226 |
+
),
|
| 227 |
+
],
|
| 228 |
+
)
|
| 229 |
+
def test_special_holidays(name, kwargs):
|
| 230 |
+
base_date = [datetime(2012, 5, 28)]
|
| 231 |
+
holiday = Holiday(name, **kwargs)
|
| 232 |
+
|
| 233 |
+
start_date = datetime(2011, 1, 1)
|
| 234 |
+
end_date = datetime(2020, 12, 31)
|
| 235 |
+
|
| 236 |
+
assert base_date == holiday.dates(start_date, end_date)
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
def test_get_calendar():
|
| 240 |
+
class TestCalendar(AbstractHolidayCalendar):
|
| 241 |
+
rules = []
|
| 242 |
+
|
| 243 |
+
calendar = get_calendar("TestCalendar")
|
| 244 |
+
assert TestCalendar == type(calendar)
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def test_factory():
|
| 248 |
+
class_1 = HolidayCalendarFactory(
|
| 249 |
+
"MemorialDay", AbstractHolidayCalendar, USMemorialDay
|
| 250 |
+
)
|
| 251 |
+
class_2 = HolidayCalendarFactory(
|
| 252 |
+
"Thanksgiving", AbstractHolidayCalendar, USThanksgivingDay
|
| 253 |
+
)
|
| 254 |
+
class_3 = HolidayCalendarFactory("Combined", class_1, class_2)
|
| 255 |
+
|
| 256 |
+
assert len(class_1.rules) == 1
|
| 257 |
+
assert len(class_2.rules) == 1
|
| 258 |
+
assert len(class_3.rules) == 2
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def test_both_offset_observance_raises():
|
| 262 |
+
# see gh-10217
|
| 263 |
+
msg = "Cannot use both offset and observance"
|
| 264 |
+
with pytest.raises(NotImplementedError, match=msg):
|
| 265 |
+
Holiday(
|
| 266 |
+
"Cyber Monday",
|
| 267 |
+
month=11,
|
| 268 |
+
day=1,
|
| 269 |
+
offset=[DateOffset(weekday=SA(4))],
|
| 270 |
+
observance=next_monday,
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def test_half_open_interval_with_observance():
|
| 275 |
+
# Prompted by GH 49075
|
| 276 |
+
# Check for holidays that have a half-open date interval where
|
| 277 |
+
# they have either a start_date or end_date defined along
|
| 278 |
+
# with a defined observance pattern to make sure that the return type
|
| 279 |
+
# for Holiday.dates() remains consistent before & after the year that
|
| 280 |
+
# marks the 'edge' of the half-open date interval.
|
| 281 |
+
|
| 282 |
+
holiday_1 = Holiday(
|
| 283 |
+
"Arbitrary Holiday - start 2022-03-14",
|
| 284 |
+
start_date=datetime(2022, 3, 14),
|
| 285 |
+
month=3,
|
| 286 |
+
day=14,
|
| 287 |
+
observance=next_monday,
|
| 288 |
+
)
|
| 289 |
+
holiday_2 = Holiday(
|
| 290 |
+
"Arbitrary Holiday 2 - end 2022-03-20",
|
| 291 |
+
end_date=datetime(2022, 3, 20),
|
| 292 |
+
month=3,
|
| 293 |
+
day=20,
|
| 294 |
+
observance=next_monday,
|
| 295 |
+
)
|
| 296 |
+
|
| 297 |
+
class TestHolidayCalendar(AbstractHolidayCalendar):
|
| 298 |
+
rules = [
|
| 299 |
+
USMartinLutherKingJr,
|
| 300 |
+
holiday_1,
|
| 301 |
+
holiday_2,
|
| 302 |
+
USLaborDay,
|
| 303 |
+
]
|
| 304 |
+
|
| 305 |
+
start = Timestamp("2022-08-01")
|
| 306 |
+
end = Timestamp("2022-08-31")
|
| 307 |
+
year_offset = DateOffset(years=5)
|
| 308 |
+
expected_results = DatetimeIndex([], dtype="datetime64[ns]", freq=None)
|
| 309 |
+
test_cal = TestHolidayCalendar()
|
| 310 |
+
|
| 311 |
+
date_interval_low = test_cal.holidays(start - year_offset, end - year_offset)
|
| 312 |
+
date_window_edge = test_cal.holidays(start, end)
|
| 313 |
+
date_interval_high = test_cal.holidays(start + year_offset, end + year_offset)
|
| 314 |
+
|
| 315 |
+
tm.assert_index_equal(date_interval_low, expected_results)
|
| 316 |
+
tm.assert_index_equal(date_window_edge, expected_results)
|
| 317 |
+
tm.assert_index_equal(date_interval_high, expected_results)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def test_holidays_with_timezone_specified_but_no_occurences():
|
| 321 |
+
# GH 54580
|
| 322 |
+
# _apply_rule() in holiday.py was silently dropping timezones if you passed it
|
| 323 |
+
# an empty list of holiday dates that had timezone information
|
| 324 |
+
start_date = Timestamp("2018-01-01", tz="America/Chicago")
|
| 325 |
+
end_date = Timestamp("2018-01-11", tz="America/Chicago")
|
| 326 |
+
test_case = USFederalHolidayCalendar().holidays(
|
| 327 |
+
start_date, end_date, return_name=True
|
| 328 |
+
)
|
| 329 |
+
expected_results = Series("New Year's Day", index=[start_date])
|
| 330 |
+
expected_results.index = expected_results.index.as_unit("ns")
|
| 331 |
+
|
| 332 |
+
tm.assert_equal(test_case, expected_results)
|