Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llava_next/lib/python3.10/site-packages/pandas/tests/indexes/test_common.py +512 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/scalar/period/test_period.py +1154 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__init__.py +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_freq_code.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_frequencies.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_inference.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_freq_code.py +69 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_frequencies.py +29 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_inference.py +558 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__init__.py +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_calendar.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_federal.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_holiday.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_observance.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_calendar.py +119 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_federal.py +58 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_holiday.py +332 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__init__.py +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_hour.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_month.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_quarter.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_year.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_common.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_custom_business_day.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_custom_business_month.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_offsets.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_week.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/common.py +37 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_day.py +236 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_hour.py +1445 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_month.py +217 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_quarter.py +315 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_year.py +215 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_common.py +268 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_custom_business_day.py +98 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_custom_business_hour.py +329 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_custom_business_month.py +437 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_dst.py +260 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_easter.py +33 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_fiscal.py +656 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_index.py +57 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_month.py +666 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_offsets.py +1185 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_offsets_properties.py +60 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_quarter.py +303 -0
- llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_ticks.py +405 -0
llava_next/lib/python3.10/site-packages/pandas/tests/indexes/test_common.py
ADDED
|
@@ -0,0 +1,512 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Collection of tests asserting things that should be true for
|
| 3 |
+
any index subclass except for MultiIndex. Makes use of the `index_flat`
|
| 4 |
+
fixture defined in pandas/conftest.py.
|
| 5 |
+
"""
|
| 6 |
+
from copy import (
|
| 7 |
+
copy,
|
| 8 |
+
deepcopy,
|
| 9 |
+
)
|
| 10 |
+
import re
|
| 11 |
+
|
| 12 |
+
import numpy as np
|
| 13 |
+
import pytest
|
| 14 |
+
|
| 15 |
+
from pandas.compat import IS64
|
| 16 |
+
from pandas.compat.numpy import np_version_gte1p25
|
| 17 |
+
|
| 18 |
+
from pandas.core.dtypes.common import (
|
| 19 |
+
is_integer_dtype,
|
| 20 |
+
is_numeric_dtype,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
import pandas as pd
|
| 24 |
+
from pandas import (
|
| 25 |
+
CategoricalIndex,
|
| 26 |
+
MultiIndex,
|
| 27 |
+
PeriodIndex,
|
| 28 |
+
RangeIndex,
|
| 29 |
+
)
|
| 30 |
+
import pandas._testing as tm
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class TestCommon:
|
| 34 |
+
@pytest.mark.parametrize("name", [None, "new_name"])
|
| 35 |
+
def test_to_frame(self, name, index_flat, using_copy_on_write):
|
| 36 |
+
# see GH#15230, GH#22580
|
| 37 |
+
idx = index_flat
|
| 38 |
+
|
| 39 |
+
if name:
|
| 40 |
+
idx_name = name
|
| 41 |
+
else:
|
| 42 |
+
idx_name = idx.name or 0
|
| 43 |
+
|
| 44 |
+
df = idx.to_frame(name=idx_name)
|
| 45 |
+
|
| 46 |
+
assert df.index is idx
|
| 47 |
+
assert len(df.columns) == 1
|
| 48 |
+
assert df.columns[0] == idx_name
|
| 49 |
+
if not using_copy_on_write:
|
| 50 |
+
assert df[idx_name].values is not idx.values
|
| 51 |
+
|
| 52 |
+
df = idx.to_frame(index=False, name=idx_name)
|
| 53 |
+
assert df.index is not idx
|
| 54 |
+
|
| 55 |
+
def test_droplevel(self, index_flat):
|
| 56 |
+
# GH 21115
|
| 57 |
+
# MultiIndex is tested separately in test_multi.py
|
| 58 |
+
index = index_flat
|
| 59 |
+
|
| 60 |
+
assert index.droplevel([]).equals(index)
|
| 61 |
+
|
| 62 |
+
for level in [index.name, [index.name]]:
|
| 63 |
+
if isinstance(index.name, tuple) and level is index.name:
|
| 64 |
+
# GH 21121 : droplevel with tuple name
|
| 65 |
+
continue
|
| 66 |
+
msg = (
|
| 67 |
+
"Cannot remove 1 levels from an index with 1 levels: at least one "
|
| 68 |
+
"level must be left."
|
| 69 |
+
)
|
| 70 |
+
with pytest.raises(ValueError, match=msg):
|
| 71 |
+
index.droplevel(level)
|
| 72 |
+
|
| 73 |
+
for level in "wrong", ["wrong"]:
|
| 74 |
+
with pytest.raises(
|
| 75 |
+
KeyError,
|
| 76 |
+
match=r"'Requested level \(wrong\) does not match index name \(None\)'",
|
| 77 |
+
):
|
| 78 |
+
index.droplevel(level)
|
| 79 |
+
|
| 80 |
+
def test_constructor_non_hashable_name(self, index_flat):
|
| 81 |
+
# GH 20527
|
| 82 |
+
index = index_flat
|
| 83 |
+
|
| 84 |
+
message = "Index.name must be a hashable type"
|
| 85 |
+
renamed = [["1"]]
|
| 86 |
+
|
| 87 |
+
# With .rename()
|
| 88 |
+
with pytest.raises(TypeError, match=message):
|
| 89 |
+
index.rename(name=renamed)
|
| 90 |
+
|
| 91 |
+
# With .set_names()
|
| 92 |
+
with pytest.raises(TypeError, match=message):
|
| 93 |
+
index.set_names(names=renamed)
|
| 94 |
+
|
| 95 |
+
def test_constructor_unwraps_index(self, index_flat):
|
| 96 |
+
a = index_flat
|
| 97 |
+
# Passing dtype is necessary for Index([True, False], dtype=object)
|
| 98 |
+
# case.
|
| 99 |
+
b = type(a)(a, dtype=a.dtype)
|
| 100 |
+
tm.assert_equal(a._data, b._data)
|
| 101 |
+
|
| 102 |
+
def test_to_flat_index(self, index_flat):
|
| 103 |
+
# 22866
|
| 104 |
+
index = index_flat
|
| 105 |
+
|
| 106 |
+
result = index.to_flat_index()
|
| 107 |
+
tm.assert_index_equal(result, index)
|
| 108 |
+
|
| 109 |
+
def test_set_name_methods(self, index_flat):
|
| 110 |
+
# MultiIndex tested separately
|
| 111 |
+
index = index_flat
|
| 112 |
+
new_name = "This is the new name for this index"
|
| 113 |
+
|
| 114 |
+
original_name = index.name
|
| 115 |
+
new_ind = index.set_names([new_name])
|
| 116 |
+
assert new_ind.name == new_name
|
| 117 |
+
assert index.name == original_name
|
| 118 |
+
res = index.rename(new_name, inplace=True)
|
| 119 |
+
|
| 120 |
+
# should return None
|
| 121 |
+
assert res is None
|
| 122 |
+
assert index.name == new_name
|
| 123 |
+
assert index.names == [new_name]
|
| 124 |
+
with pytest.raises(ValueError, match="Level must be None"):
|
| 125 |
+
index.set_names("a", level=0)
|
| 126 |
+
|
| 127 |
+
# rename in place just leaves tuples and other containers alone
|
| 128 |
+
name = ("A", "B")
|
| 129 |
+
index.rename(name, inplace=True)
|
| 130 |
+
assert index.name == name
|
| 131 |
+
assert index.names == [name]
|
| 132 |
+
|
| 133 |
+
@pytest.mark.xfail
|
| 134 |
+
def test_set_names_single_label_no_level(self, index_flat):
|
| 135 |
+
with pytest.raises(TypeError, match="list-like"):
|
| 136 |
+
# should still fail even if it would be the right length
|
| 137 |
+
index_flat.set_names("a")
|
| 138 |
+
|
| 139 |
+
def test_copy_and_deepcopy(self, index_flat):
|
| 140 |
+
index = index_flat
|
| 141 |
+
|
| 142 |
+
for func in (copy, deepcopy):
|
| 143 |
+
idx_copy = func(index)
|
| 144 |
+
assert idx_copy is not index
|
| 145 |
+
assert idx_copy.equals(index)
|
| 146 |
+
|
| 147 |
+
new_copy = index.copy(deep=True, name="banana")
|
| 148 |
+
assert new_copy.name == "banana"
|
| 149 |
+
|
| 150 |
+
def test_copy_name(self, index_flat):
|
| 151 |
+
# GH#12309: Check that the "name" argument
|
| 152 |
+
# passed at initialization is honored.
|
| 153 |
+
index = index_flat
|
| 154 |
+
|
| 155 |
+
first = type(index)(index, copy=True, name="mario")
|
| 156 |
+
second = type(first)(first, copy=False)
|
| 157 |
+
|
| 158 |
+
# Even though "copy=False", we want a new object.
|
| 159 |
+
assert first is not second
|
| 160 |
+
tm.assert_index_equal(first, second)
|
| 161 |
+
|
| 162 |
+
# Not using tm.assert_index_equal() since names differ.
|
| 163 |
+
assert index.equals(first)
|
| 164 |
+
|
| 165 |
+
assert first.name == "mario"
|
| 166 |
+
assert second.name == "mario"
|
| 167 |
+
|
| 168 |
+
# TODO: belongs in series arithmetic tests?
|
| 169 |
+
s1 = pd.Series(2, index=first)
|
| 170 |
+
s2 = pd.Series(3, index=second[:-1])
|
| 171 |
+
# See GH#13365
|
| 172 |
+
s3 = s1 * s2
|
| 173 |
+
assert s3.index.name == "mario"
|
| 174 |
+
|
| 175 |
+
def test_copy_name2(self, index_flat):
|
| 176 |
+
# GH#35592
|
| 177 |
+
index = index_flat
|
| 178 |
+
|
| 179 |
+
assert index.copy(name="mario").name == "mario"
|
| 180 |
+
|
| 181 |
+
with pytest.raises(ValueError, match="Length of new names must be 1, got 2"):
|
| 182 |
+
index.copy(name=["mario", "luigi"])
|
| 183 |
+
|
| 184 |
+
msg = f"{type(index).__name__}.name must be a hashable type"
|
| 185 |
+
with pytest.raises(TypeError, match=msg):
|
| 186 |
+
index.copy(name=[["mario"]])
|
| 187 |
+
|
| 188 |
+
def test_unique_level(self, index_flat):
|
| 189 |
+
# don't test a MultiIndex here (as its tested separated)
|
| 190 |
+
index = index_flat
|
| 191 |
+
|
| 192 |
+
# GH 17896
|
| 193 |
+
expected = index.drop_duplicates()
|
| 194 |
+
for level in [0, index.name, None]:
|
| 195 |
+
result = index.unique(level=level)
|
| 196 |
+
tm.assert_index_equal(result, expected)
|
| 197 |
+
|
| 198 |
+
msg = "Too many levels: Index has only 1 level, not 4"
|
| 199 |
+
with pytest.raises(IndexError, match=msg):
|
| 200 |
+
index.unique(level=3)
|
| 201 |
+
|
| 202 |
+
msg = (
|
| 203 |
+
rf"Requested level \(wrong\) does not match index name "
|
| 204 |
+
rf"\({re.escape(index.name.__repr__())}\)"
|
| 205 |
+
)
|
| 206 |
+
with pytest.raises(KeyError, match=msg):
|
| 207 |
+
index.unique(level="wrong")
|
| 208 |
+
|
| 209 |
+
def test_unique(self, index_flat):
|
| 210 |
+
# MultiIndex tested separately
|
| 211 |
+
index = index_flat
|
| 212 |
+
if not len(index):
|
| 213 |
+
pytest.skip("Skip check for empty Index and MultiIndex")
|
| 214 |
+
|
| 215 |
+
idx = index[[0] * 5]
|
| 216 |
+
idx_unique = index[[0]]
|
| 217 |
+
|
| 218 |
+
# We test against `idx_unique`, so first we make sure it's unique
|
| 219 |
+
# and doesn't contain nans.
|
| 220 |
+
assert idx_unique.is_unique is True
|
| 221 |
+
try:
|
| 222 |
+
assert idx_unique.hasnans is False
|
| 223 |
+
except NotImplementedError:
|
| 224 |
+
pass
|
| 225 |
+
|
| 226 |
+
result = idx.unique()
|
| 227 |
+
tm.assert_index_equal(result, idx_unique)
|
| 228 |
+
|
| 229 |
+
# nans:
|
| 230 |
+
if not index._can_hold_na:
|
| 231 |
+
pytest.skip("Skip na-check if index cannot hold na")
|
| 232 |
+
|
| 233 |
+
vals = index._values[[0] * 5]
|
| 234 |
+
vals[0] = np.nan
|
| 235 |
+
|
| 236 |
+
vals_unique = vals[:2]
|
| 237 |
+
idx_nan = index._shallow_copy(vals)
|
| 238 |
+
idx_unique_nan = index._shallow_copy(vals_unique)
|
| 239 |
+
assert idx_unique_nan.is_unique is True
|
| 240 |
+
|
| 241 |
+
assert idx_nan.dtype == index.dtype
|
| 242 |
+
assert idx_unique_nan.dtype == index.dtype
|
| 243 |
+
|
| 244 |
+
expected = idx_unique_nan
|
| 245 |
+
for pos, i in enumerate([idx_nan, idx_unique_nan]):
|
| 246 |
+
result = i.unique()
|
| 247 |
+
tm.assert_index_equal(result, expected)
|
| 248 |
+
|
| 249 |
+
@pytest.mark.filterwarnings("ignore:Period with BDay freq:FutureWarning")
|
| 250 |
+
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
|
| 251 |
+
def test_searchsorted_monotonic(self, index_flat, request):
|
| 252 |
+
# GH17271
|
| 253 |
+
index = index_flat
|
| 254 |
+
# not implemented for tuple searches in MultiIndex
|
| 255 |
+
# or Intervals searches in IntervalIndex
|
| 256 |
+
if isinstance(index, pd.IntervalIndex):
|
| 257 |
+
mark = pytest.mark.xfail(
|
| 258 |
+
reason="IntervalIndex.searchsorted does not support Interval arg",
|
| 259 |
+
raises=NotImplementedError,
|
| 260 |
+
)
|
| 261 |
+
request.applymarker(mark)
|
| 262 |
+
|
| 263 |
+
# nothing to test if the index is empty
|
| 264 |
+
if index.empty:
|
| 265 |
+
pytest.skip("Skip check for empty Index")
|
| 266 |
+
value = index[0]
|
| 267 |
+
|
| 268 |
+
# determine the expected results (handle dupes for 'right')
|
| 269 |
+
expected_left, expected_right = 0, (index == value).argmin()
|
| 270 |
+
if expected_right == 0:
|
| 271 |
+
# all values are the same, expected_right should be length
|
| 272 |
+
expected_right = len(index)
|
| 273 |
+
|
| 274 |
+
# test _searchsorted_monotonic in all cases
|
| 275 |
+
# test searchsorted only for increasing
|
| 276 |
+
if index.is_monotonic_increasing:
|
| 277 |
+
ssm_left = index._searchsorted_monotonic(value, side="left")
|
| 278 |
+
assert expected_left == ssm_left
|
| 279 |
+
|
| 280 |
+
ssm_right = index._searchsorted_monotonic(value, side="right")
|
| 281 |
+
assert expected_right == ssm_right
|
| 282 |
+
|
| 283 |
+
ss_left = index.searchsorted(value, side="left")
|
| 284 |
+
assert expected_left == ss_left
|
| 285 |
+
|
| 286 |
+
ss_right = index.searchsorted(value, side="right")
|
| 287 |
+
assert expected_right == ss_right
|
| 288 |
+
|
| 289 |
+
elif index.is_monotonic_decreasing:
|
| 290 |
+
ssm_left = index._searchsorted_monotonic(value, side="left")
|
| 291 |
+
assert expected_left == ssm_left
|
| 292 |
+
|
| 293 |
+
ssm_right = index._searchsorted_monotonic(value, side="right")
|
| 294 |
+
assert expected_right == ssm_right
|
| 295 |
+
else:
|
| 296 |
+
# non-monotonic should raise.
|
| 297 |
+
msg = "index must be monotonic increasing or decreasing"
|
| 298 |
+
with pytest.raises(ValueError, match=msg):
|
| 299 |
+
index._searchsorted_monotonic(value, side="left")
|
| 300 |
+
|
| 301 |
+
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
|
| 302 |
+
def test_drop_duplicates(self, index_flat, keep):
|
| 303 |
+
# MultiIndex is tested separately
|
| 304 |
+
index = index_flat
|
| 305 |
+
if isinstance(index, RangeIndex):
|
| 306 |
+
pytest.skip(
|
| 307 |
+
"RangeIndex is tested in test_drop_duplicates_no_duplicates "
|
| 308 |
+
"as it cannot hold duplicates"
|
| 309 |
+
)
|
| 310 |
+
if len(index) == 0:
|
| 311 |
+
pytest.skip(
|
| 312 |
+
"empty index is tested in test_drop_duplicates_no_duplicates "
|
| 313 |
+
"as it cannot hold duplicates"
|
| 314 |
+
)
|
| 315 |
+
|
| 316 |
+
# make unique index
|
| 317 |
+
holder = type(index)
|
| 318 |
+
unique_values = list(set(index))
|
| 319 |
+
dtype = index.dtype if is_numeric_dtype(index) else None
|
| 320 |
+
unique_idx = holder(unique_values, dtype=dtype)
|
| 321 |
+
|
| 322 |
+
# make duplicated index
|
| 323 |
+
n = len(unique_idx)
|
| 324 |
+
duplicated_selection = np.random.default_rng(2).choice(n, int(n * 1.5))
|
| 325 |
+
idx = holder(unique_idx.values[duplicated_selection])
|
| 326 |
+
|
| 327 |
+
# Series.duplicated is tested separately
|
| 328 |
+
expected_duplicated = (
|
| 329 |
+
pd.Series(duplicated_selection).duplicated(keep=keep).values
|
| 330 |
+
)
|
| 331 |
+
tm.assert_numpy_array_equal(idx.duplicated(keep=keep), expected_duplicated)
|
| 332 |
+
|
| 333 |
+
# Series.drop_duplicates is tested separately
|
| 334 |
+
expected_dropped = holder(pd.Series(idx).drop_duplicates(keep=keep))
|
| 335 |
+
tm.assert_index_equal(idx.drop_duplicates(keep=keep), expected_dropped)
|
| 336 |
+
|
| 337 |
+
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
|
| 338 |
+
def test_drop_duplicates_no_duplicates(self, index_flat):
|
| 339 |
+
# MultiIndex is tested separately
|
| 340 |
+
index = index_flat
|
| 341 |
+
|
| 342 |
+
# make unique index
|
| 343 |
+
if isinstance(index, RangeIndex):
|
| 344 |
+
# RangeIndex cannot have duplicates
|
| 345 |
+
unique_idx = index
|
| 346 |
+
else:
|
| 347 |
+
holder = type(index)
|
| 348 |
+
unique_values = list(set(index))
|
| 349 |
+
dtype = index.dtype if is_numeric_dtype(index) else None
|
| 350 |
+
unique_idx = holder(unique_values, dtype=dtype)
|
| 351 |
+
|
| 352 |
+
# check on unique index
|
| 353 |
+
expected_duplicated = np.array([False] * len(unique_idx), dtype="bool")
|
| 354 |
+
tm.assert_numpy_array_equal(unique_idx.duplicated(), expected_duplicated)
|
| 355 |
+
result_dropped = unique_idx.drop_duplicates()
|
| 356 |
+
tm.assert_index_equal(result_dropped, unique_idx)
|
| 357 |
+
# validate shallow copy
|
| 358 |
+
assert result_dropped is not unique_idx
|
| 359 |
+
|
| 360 |
+
def test_drop_duplicates_inplace(self, index):
|
| 361 |
+
msg = r"drop_duplicates\(\) got an unexpected keyword argument"
|
| 362 |
+
with pytest.raises(TypeError, match=msg):
|
| 363 |
+
index.drop_duplicates(inplace=True)
|
| 364 |
+
|
| 365 |
+
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
|
| 366 |
+
def test_has_duplicates(self, index_flat):
|
| 367 |
+
# MultiIndex tested separately in:
|
| 368 |
+
# tests/indexes/multi/test_unique_and_duplicates.
|
| 369 |
+
index = index_flat
|
| 370 |
+
holder = type(index)
|
| 371 |
+
if not len(index) or isinstance(index, RangeIndex):
|
| 372 |
+
# MultiIndex tested separately in:
|
| 373 |
+
# tests/indexes/multi/test_unique_and_duplicates.
|
| 374 |
+
# RangeIndex is unique by definition.
|
| 375 |
+
pytest.skip("Skip check for empty Index, MultiIndex, and RangeIndex")
|
| 376 |
+
|
| 377 |
+
idx = holder([index[0]] * 5)
|
| 378 |
+
assert idx.is_unique is False
|
| 379 |
+
assert idx.has_duplicates is True
|
| 380 |
+
|
| 381 |
+
@pytest.mark.parametrize(
|
| 382 |
+
"dtype",
|
| 383 |
+
["int64", "uint64", "float64", "category", "datetime64[ns]", "timedelta64[ns]"],
|
| 384 |
+
)
|
| 385 |
+
def test_astype_preserves_name(self, index, dtype):
|
| 386 |
+
# https://github.com/pandas-dev/pandas/issues/32013
|
| 387 |
+
if isinstance(index, MultiIndex):
|
| 388 |
+
index.names = ["idx" + str(i) for i in range(index.nlevels)]
|
| 389 |
+
else:
|
| 390 |
+
index.name = "idx"
|
| 391 |
+
|
| 392 |
+
warn = None
|
| 393 |
+
if index.dtype.kind == "c" and dtype in ["float64", "int64", "uint64"]:
|
| 394 |
+
# imaginary components discarded
|
| 395 |
+
if np_version_gte1p25:
|
| 396 |
+
warn = np.exceptions.ComplexWarning
|
| 397 |
+
else:
|
| 398 |
+
warn = np.ComplexWarning
|
| 399 |
+
|
| 400 |
+
is_pyarrow_str = str(index.dtype) == "string[pyarrow]" and dtype == "category"
|
| 401 |
+
try:
|
| 402 |
+
# Some of these conversions cannot succeed so we use a try / except
|
| 403 |
+
with tm.assert_produces_warning(
|
| 404 |
+
warn,
|
| 405 |
+
raise_on_extra_warnings=is_pyarrow_str,
|
| 406 |
+
check_stacklevel=False,
|
| 407 |
+
):
|
| 408 |
+
result = index.astype(dtype)
|
| 409 |
+
except (ValueError, TypeError, NotImplementedError, SystemError):
|
| 410 |
+
return
|
| 411 |
+
|
| 412 |
+
if isinstance(index, MultiIndex):
|
| 413 |
+
assert result.names == index.names
|
| 414 |
+
else:
|
| 415 |
+
assert result.name == index.name
|
| 416 |
+
|
| 417 |
+
def test_hasnans_isnans(self, index_flat):
|
| 418 |
+
# GH#11343, added tests for hasnans / isnans
|
| 419 |
+
index = index_flat
|
| 420 |
+
|
| 421 |
+
# cases in indices doesn't include NaN
|
| 422 |
+
idx = index.copy(deep=True)
|
| 423 |
+
expected = np.array([False] * len(idx), dtype=bool)
|
| 424 |
+
tm.assert_numpy_array_equal(idx._isnan, expected)
|
| 425 |
+
assert idx.hasnans is False
|
| 426 |
+
|
| 427 |
+
idx = index.copy(deep=True)
|
| 428 |
+
values = idx._values
|
| 429 |
+
|
| 430 |
+
if len(index) == 0:
|
| 431 |
+
return
|
| 432 |
+
elif is_integer_dtype(index.dtype):
|
| 433 |
+
return
|
| 434 |
+
elif index.dtype == bool:
|
| 435 |
+
# values[1] = np.nan below casts to True!
|
| 436 |
+
return
|
| 437 |
+
|
| 438 |
+
values[1] = np.nan
|
| 439 |
+
|
| 440 |
+
idx = type(index)(values)
|
| 441 |
+
|
| 442 |
+
expected = np.array([False] * len(idx), dtype=bool)
|
| 443 |
+
expected[1] = True
|
| 444 |
+
tm.assert_numpy_array_equal(idx._isnan, expected)
|
| 445 |
+
assert idx.hasnans is True
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
|
| 449 |
+
@pytest.mark.parametrize("na_position", [None, "middle"])
|
| 450 |
+
def test_sort_values_invalid_na_position(index_with_missing, na_position):
|
| 451 |
+
with pytest.raises(ValueError, match=f"invalid na_position: {na_position}"):
|
| 452 |
+
index_with_missing.sort_values(na_position=na_position)
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
@pytest.mark.fails_arm_wheels
|
| 456 |
+
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
|
| 457 |
+
@pytest.mark.parametrize("na_position", ["first", "last"])
|
| 458 |
+
def test_sort_values_with_missing(index_with_missing, na_position, request):
|
| 459 |
+
# GH 35584. Test that sort_values works with missing values,
|
| 460 |
+
# sort non-missing and place missing according to na_position
|
| 461 |
+
|
| 462 |
+
if isinstance(index_with_missing, CategoricalIndex):
|
| 463 |
+
request.applymarker(
|
| 464 |
+
pytest.mark.xfail(
|
| 465 |
+
reason="missing value sorting order not well-defined", strict=False
|
| 466 |
+
)
|
| 467 |
+
)
|
| 468 |
+
|
| 469 |
+
missing_count = np.sum(index_with_missing.isna())
|
| 470 |
+
not_na_vals = index_with_missing[index_with_missing.notna()].values
|
| 471 |
+
sorted_values = np.sort(not_na_vals)
|
| 472 |
+
if na_position == "first":
|
| 473 |
+
sorted_values = np.concatenate([[None] * missing_count, sorted_values])
|
| 474 |
+
else:
|
| 475 |
+
sorted_values = np.concatenate([sorted_values, [None] * missing_count])
|
| 476 |
+
|
| 477 |
+
# Explicitly pass dtype needed for Index backed by EA e.g. IntegerArray
|
| 478 |
+
expected = type(index_with_missing)(sorted_values, dtype=index_with_missing.dtype)
|
| 479 |
+
|
| 480 |
+
result = index_with_missing.sort_values(na_position=na_position)
|
| 481 |
+
tm.assert_index_equal(result, expected)
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
def test_ndarray_compat_properties(index):
|
| 485 |
+
if isinstance(index, PeriodIndex) and not IS64:
|
| 486 |
+
pytest.skip("Overflow")
|
| 487 |
+
idx = index
|
| 488 |
+
assert idx.T.equals(idx)
|
| 489 |
+
assert idx.transpose().equals(idx)
|
| 490 |
+
|
| 491 |
+
values = idx.values
|
| 492 |
+
|
| 493 |
+
assert idx.shape == values.shape
|
| 494 |
+
assert idx.ndim == values.ndim
|
| 495 |
+
assert idx.size == values.size
|
| 496 |
+
|
| 497 |
+
if not isinstance(index, (RangeIndex, MultiIndex)):
|
| 498 |
+
# These two are not backed by an ndarray
|
| 499 |
+
assert idx.nbytes == values.nbytes
|
| 500 |
+
|
| 501 |
+
# test for validity
|
| 502 |
+
idx.nbytes
|
| 503 |
+
idx.values.nbytes
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
def test_compare_read_only_array():
|
| 507 |
+
# GH#57130
|
| 508 |
+
arr = np.array([], dtype=object)
|
| 509 |
+
arr.flags.writeable = False
|
| 510 |
+
idx = pd.Index(arr)
|
| 511 |
+
result = idx > 69
|
| 512 |
+
assert result.dtype == bool
|
llava_next/lib/python3.10/site-packages/pandas/tests/scalar/period/test_period.py
ADDED
|
@@ -0,0 +1,1154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import (
|
| 2 |
+
date,
|
| 3 |
+
datetime,
|
| 4 |
+
timedelta,
|
| 5 |
+
)
|
| 6 |
+
import re
|
| 7 |
+
|
| 8 |
+
import numpy as np
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from pandas._libs.tslibs import iNaT
|
| 12 |
+
from pandas._libs.tslibs.ccalendar import (
|
| 13 |
+
DAYS,
|
| 14 |
+
MONTHS,
|
| 15 |
+
)
|
| 16 |
+
from pandas._libs.tslibs.np_datetime import OutOfBoundsDatetime
|
| 17 |
+
from pandas._libs.tslibs.parsing import DateParseError
|
| 18 |
+
from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
|
| 19 |
+
|
| 20 |
+
from pandas import (
|
| 21 |
+
NaT,
|
| 22 |
+
Period,
|
| 23 |
+
Timedelta,
|
| 24 |
+
Timestamp,
|
| 25 |
+
offsets,
|
| 26 |
+
)
|
| 27 |
+
import pandas._testing as tm
|
| 28 |
+
|
| 29 |
+
bday_msg = "Period with BDay freq is deprecated"
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class TestPeriodDisallowedFreqs:
|
| 33 |
+
@pytest.mark.parametrize(
|
| 34 |
+
"freq, freq_msg",
|
| 35 |
+
[
|
| 36 |
+
(offsets.BYearBegin(), "BYearBegin"),
|
| 37 |
+
(offsets.YearBegin(2), "YearBegin"),
|
| 38 |
+
(offsets.QuarterBegin(startingMonth=12), "QuarterBegin"),
|
| 39 |
+
(offsets.BusinessMonthEnd(2), "BusinessMonthEnd"),
|
| 40 |
+
],
|
| 41 |
+
)
|
| 42 |
+
def test_offsets_not_supported(self, freq, freq_msg):
|
| 43 |
+
# GH#55785
|
| 44 |
+
msg = re.escape(f"{freq} is not supported as period frequency")
|
| 45 |
+
with pytest.raises(ValueError, match=msg):
|
| 46 |
+
Period(year=2014, freq=freq)
|
| 47 |
+
|
| 48 |
+
def test_custom_business_day_freq_raises(self):
|
| 49 |
+
# GH#52534
|
| 50 |
+
msg = "C is not supported as period frequency"
|
| 51 |
+
with pytest.raises(ValueError, match=msg):
|
| 52 |
+
Period("2023-04-10", freq="C")
|
| 53 |
+
msg = f"{offsets.CustomBusinessDay().base} is not supported as period frequency"
|
| 54 |
+
with pytest.raises(ValueError, match=msg):
|
| 55 |
+
Period("2023-04-10", freq=offsets.CustomBusinessDay())
|
| 56 |
+
|
| 57 |
+
def test_invalid_frequency_error_message(self):
|
| 58 |
+
msg = "WOM-1MON is not supported as period frequency"
|
| 59 |
+
with pytest.raises(ValueError, match=msg):
|
| 60 |
+
Period("2012-01-02", freq="WOM-1MON")
|
| 61 |
+
|
| 62 |
+
def test_invalid_frequency_period_error_message(self):
|
| 63 |
+
msg = "for Period, please use 'M' instead of 'ME'"
|
| 64 |
+
with pytest.raises(ValueError, match=msg):
|
| 65 |
+
Period("2012-01-02", freq="ME")
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class TestPeriodConstruction:
|
| 69 |
+
def test_from_td64nat_raises(self):
|
| 70 |
+
# GH#44507
|
| 71 |
+
td = NaT.to_numpy("m8[ns]")
|
| 72 |
+
|
| 73 |
+
msg = "Value must be Period, string, integer, or datetime"
|
| 74 |
+
with pytest.raises(ValueError, match=msg):
|
| 75 |
+
Period(td)
|
| 76 |
+
|
| 77 |
+
with pytest.raises(ValueError, match=msg):
|
| 78 |
+
Period(td, freq="D")
|
| 79 |
+
|
| 80 |
+
def test_construction(self):
|
| 81 |
+
i1 = Period("1/1/2005", freq="M")
|
| 82 |
+
i2 = Period("Jan 2005")
|
| 83 |
+
|
| 84 |
+
assert i1 == i2
|
| 85 |
+
|
| 86 |
+
# GH#54105 - Period can be confusingly instantiated with lowercase freq
|
| 87 |
+
# TODO: raise in the future an error when passing lowercase freq
|
| 88 |
+
i1 = Period("2005", freq="Y")
|
| 89 |
+
i2 = Period("2005")
|
| 90 |
+
|
| 91 |
+
assert i1 == i2
|
| 92 |
+
|
| 93 |
+
i4 = Period("2005", freq="M")
|
| 94 |
+
assert i1 != i4
|
| 95 |
+
|
| 96 |
+
i1 = Period.now(freq="Q")
|
| 97 |
+
i2 = Period(datetime.now(), freq="Q")
|
| 98 |
+
|
| 99 |
+
assert i1 == i2
|
| 100 |
+
|
| 101 |
+
# Pass in freq as a keyword argument sometimes as a test for
|
| 102 |
+
# https://github.com/pandas-dev/pandas/issues/53369
|
| 103 |
+
i1 = Period.now(freq="D")
|
| 104 |
+
i2 = Period(datetime.now(), freq="D")
|
| 105 |
+
i3 = Period.now(offsets.Day())
|
| 106 |
+
|
| 107 |
+
assert i1 == i2
|
| 108 |
+
assert i1 == i3
|
| 109 |
+
|
| 110 |
+
i1 = Period("1982", freq="min")
|
| 111 |
+
msg = "'MIN' is deprecated and will be removed in a future version."
|
| 112 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 113 |
+
i2 = Period("1982", freq="MIN")
|
| 114 |
+
assert i1 == i2
|
| 115 |
+
|
| 116 |
+
i1 = Period(year=2005, month=3, day=1, freq="D")
|
| 117 |
+
i2 = Period("3/1/2005", freq="D")
|
| 118 |
+
assert i1 == i2
|
| 119 |
+
|
| 120 |
+
i3 = Period(year=2005, month=3, day=1, freq="d")
|
| 121 |
+
assert i1 == i3
|
| 122 |
+
|
| 123 |
+
i1 = Period("2007-01-01 09:00:00.001")
|
| 124 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1000), freq="ms")
|
| 125 |
+
assert i1 == expected
|
| 126 |
+
|
| 127 |
+
expected = Period("2007-01-01 09:00:00.001", freq="ms")
|
| 128 |
+
assert i1 == expected
|
| 129 |
+
|
| 130 |
+
i1 = Period("2007-01-01 09:00:00.00101")
|
| 131 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1010), freq="us")
|
| 132 |
+
assert i1 == expected
|
| 133 |
+
|
| 134 |
+
expected = Period("2007-01-01 09:00:00.00101", freq="us")
|
| 135 |
+
assert i1 == expected
|
| 136 |
+
|
| 137 |
+
msg = "Must supply freq for ordinal value"
|
| 138 |
+
with pytest.raises(ValueError, match=msg):
|
| 139 |
+
Period(ordinal=200701)
|
| 140 |
+
|
| 141 |
+
msg = "Invalid frequency: X"
|
| 142 |
+
with pytest.raises(ValueError, match=msg):
|
| 143 |
+
Period("2007-1-1", freq="X")
|
| 144 |
+
|
| 145 |
+
def test_tuple_freq_disallowed(self):
|
| 146 |
+
# GH#34703 tuple freq disallowed
|
| 147 |
+
with pytest.raises(TypeError, match="pass as a string instead"):
|
| 148 |
+
Period("1982", freq=("Min", 1))
|
| 149 |
+
|
| 150 |
+
with pytest.raises(TypeError, match="pass as a string instead"):
|
| 151 |
+
Period("2006-12-31", ("w", 1))
|
| 152 |
+
|
| 153 |
+
def test_construction_from_timestamp_nanos(self):
|
| 154 |
+
# GH#46811 don't drop nanos from Timestamp
|
| 155 |
+
ts = Timestamp("2022-04-20 09:23:24.123456789")
|
| 156 |
+
per = Period(ts, freq="ns")
|
| 157 |
+
|
| 158 |
+
# should losslessly round-trip, not lose the 789
|
| 159 |
+
rt = per.to_timestamp()
|
| 160 |
+
assert rt == ts
|
| 161 |
+
|
| 162 |
+
# same thing but from a datetime64 object
|
| 163 |
+
dt64 = ts.asm8
|
| 164 |
+
per2 = Period(dt64, freq="ns")
|
| 165 |
+
rt2 = per2.to_timestamp()
|
| 166 |
+
assert rt2.asm8 == dt64
|
| 167 |
+
|
| 168 |
+
def test_construction_bday(self):
|
| 169 |
+
# Biz day construction, roll forward if non-weekday
|
| 170 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 171 |
+
i1 = Period("3/10/12", freq="B")
|
| 172 |
+
i2 = Period("3/10/12", freq="D")
|
| 173 |
+
assert i1 == i2.asfreq("B")
|
| 174 |
+
i2 = Period("3/11/12", freq="D")
|
| 175 |
+
assert i1 == i2.asfreq("B")
|
| 176 |
+
i2 = Period("3/12/12", freq="D")
|
| 177 |
+
assert i1 == i2.asfreq("B")
|
| 178 |
+
|
| 179 |
+
i3 = Period("3/10/12", freq="b")
|
| 180 |
+
assert i1 == i3
|
| 181 |
+
|
| 182 |
+
i1 = Period(year=2012, month=3, day=10, freq="B")
|
| 183 |
+
i2 = Period("3/12/12", freq="B")
|
| 184 |
+
assert i1 == i2
|
| 185 |
+
|
| 186 |
+
def test_construction_quarter(self):
|
| 187 |
+
i1 = Period(year=2005, quarter=1, freq="Q")
|
| 188 |
+
i2 = Period("1/1/2005", freq="Q")
|
| 189 |
+
assert i1 == i2
|
| 190 |
+
|
| 191 |
+
i1 = Period(year=2005, quarter=3, freq="Q")
|
| 192 |
+
i2 = Period("9/1/2005", freq="Q")
|
| 193 |
+
assert i1 == i2
|
| 194 |
+
|
| 195 |
+
i1 = Period("2005Q1")
|
| 196 |
+
i2 = Period(year=2005, quarter=1, freq="Q")
|
| 197 |
+
i3 = Period("2005q1")
|
| 198 |
+
assert i1 == i2
|
| 199 |
+
assert i1 == i3
|
| 200 |
+
|
| 201 |
+
i1 = Period("05Q1")
|
| 202 |
+
assert i1 == i2
|
| 203 |
+
lower = Period("05q1")
|
| 204 |
+
assert i1 == lower
|
| 205 |
+
|
| 206 |
+
i1 = Period("1Q2005")
|
| 207 |
+
assert i1 == i2
|
| 208 |
+
lower = Period("1q2005")
|
| 209 |
+
assert i1 == lower
|
| 210 |
+
|
| 211 |
+
i1 = Period("1Q05")
|
| 212 |
+
assert i1 == i2
|
| 213 |
+
lower = Period("1q05")
|
| 214 |
+
assert i1 == lower
|
| 215 |
+
|
| 216 |
+
i1 = Period("4Q1984")
|
| 217 |
+
assert i1.year == 1984
|
| 218 |
+
lower = Period("4q1984")
|
| 219 |
+
assert i1 == lower
|
| 220 |
+
|
| 221 |
+
def test_construction_month(self):
|
| 222 |
+
expected = Period("2007-01", freq="M")
|
| 223 |
+
i1 = Period("200701", freq="M")
|
| 224 |
+
assert i1 == expected
|
| 225 |
+
|
| 226 |
+
i1 = Period("200701", freq="M")
|
| 227 |
+
assert i1 == expected
|
| 228 |
+
|
| 229 |
+
i1 = Period(200701, freq="M")
|
| 230 |
+
assert i1 == expected
|
| 231 |
+
|
| 232 |
+
i1 = Period(ordinal=200701, freq="M")
|
| 233 |
+
assert i1.year == 18695
|
| 234 |
+
|
| 235 |
+
i1 = Period(datetime(2007, 1, 1), freq="M")
|
| 236 |
+
i2 = Period("200701", freq="M")
|
| 237 |
+
assert i1 == i2
|
| 238 |
+
|
| 239 |
+
i1 = Period(date(2007, 1, 1), freq="M")
|
| 240 |
+
i2 = Period(datetime(2007, 1, 1), freq="M")
|
| 241 |
+
i3 = Period(np.datetime64("2007-01-01"), freq="M")
|
| 242 |
+
i4 = Period("2007-01-01 00:00:00", freq="M")
|
| 243 |
+
i5 = Period("2007-01-01 00:00:00.000", freq="M")
|
| 244 |
+
assert i1 == i2
|
| 245 |
+
assert i1 == i3
|
| 246 |
+
assert i1 == i4
|
| 247 |
+
assert i1 == i5
|
| 248 |
+
|
| 249 |
+
def test_period_constructor_offsets(self):
|
| 250 |
+
assert Period("1/1/2005", freq=offsets.MonthEnd()) == Period(
|
| 251 |
+
"1/1/2005", freq="M"
|
| 252 |
+
)
|
| 253 |
+
assert Period("2005", freq=offsets.YearEnd()) == Period("2005", freq="Y")
|
| 254 |
+
assert Period("2005", freq=offsets.MonthEnd()) == Period("2005", freq="M")
|
| 255 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 256 |
+
assert Period("3/10/12", freq=offsets.BusinessDay()) == Period(
|
| 257 |
+
"3/10/12", freq="B"
|
| 258 |
+
)
|
| 259 |
+
assert Period("3/10/12", freq=offsets.Day()) == Period("3/10/12", freq="D")
|
| 260 |
+
|
| 261 |
+
assert Period(
|
| 262 |
+
year=2005, quarter=1, freq=offsets.QuarterEnd(startingMonth=12)
|
| 263 |
+
) == Period(year=2005, quarter=1, freq="Q")
|
| 264 |
+
assert Period(
|
| 265 |
+
year=2005, quarter=2, freq=offsets.QuarterEnd(startingMonth=12)
|
| 266 |
+
) == Period(year=2005, quarter=2, freq="Q")
|
| 267 |
+
|
| 268 |
+
assert Period(year=2005, month=3, day=1, freq=offsets.Day()) == Period(
|
| 269 |
+
year=2005, month=3, day=1, freq="D"
|
| 270 |
+
)
|
| 271 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 272 |
+
assert Period(year=2012, month=3, day=10, freq=offsets.BDay()) == Period(
|
| 273 |
+
year=2012, month=3, day=10, freq="B"
|
| 274 |
+
)
|
| 275 |
+
|
| 276 |
+
expected = Period("2005-03-01", freq="3D")
|
| 277 |
+
assert Period(year=2005, month=3, day=1, freq=offsets.Day(3)) == expected
|
| 278 |
+
assert Period(year=2005, month=3, day=1, freq="3D") == expected
|
| 279 |
+
|
| 280 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 281 |
+
assert Period(year=2012, month=3, day=10, freq=offsets.BDay(3)) == Period(
|
| 282 |
+
year=2012, month=3, day=10, freq="3B"
|
| 283 |
+
)
|
| 284 |
+
|
| 285 |
+
assert Period(200701, freq=offsets.MonthEnd()) == Period(200701, freq="M")
|
| 286 |
+
|
| 287 |
+
i1 = Period(ordinal=200701, freq=offsets.MonthEnd())
|
| 288 |
+
i2 = Period(ordinal=200701, freq="M")
|
| 289 |
+
assert i1 == i2
|
| 290 |
+
assert i1.year == 18695
|
| 291 |
+
assert i2.year == 18695
|
| 292 |
+
|
| 293 |
+
i1 = Period(datetime(2007, 1, 1), freq="M")
|
| 294 |
+
i2 = Period("200701", freq="M")
|
| 295 |
+
assert i1 == i2
|
| 296 |
+
|
| 297 |
+
i1 = Period(date(2007, 1, 1), freq="M")
|
| 298 |
+
i2 = Period(datetime(2007, 1, 1), freq="M")
|
| 299 |
+
i3 = Period(np.datetime64("2007-01-01"), freq="M")
|
| 300 |
+
i4 = Period("2007-01-01 00:00:00", freq="M")
|
| 301 |
+
i5 = Period("2007-01-01 00:00:00.000", freq="M")
|
| 302 |
+
assert i1 == i2
|
| 303 |
+
assert i1 == i3
|
| 304 |
+
assert i1 == i4
|
| 305 |
+
assert i1 == i5
|
| 306 |
+
|
| 307 |
+
i1 = Period("2007-01-01 09:00:00.001")
|
| 308 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1000), freq="ms")
|
| 309 |
+
assert i1 == expected
|
| 310 |
+
|
| 311 |
+
expected = Period("2007-01-01 09:00:00.001", freq="ms")
|
| 312 |
+
assert i1 == expected
|
| 313 |
+
|
| 314 |
+
i1 = Period("2007-01-01 09:00:00.00101")
|
| 315 |
+
expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1010), freq="us")
|
| 316 |
+
assert i1 == expected
|
| 317 |
+
|
| 318 |
+
expected = Period("2007-01-01 09:00:00.00101", freq="us")
|
| 319 |
+
assert i1 == expected
|
| 320 |
+
|
| 321 |
+
def test_invalid_arguments(self):
|
| 322 |
+
msg = "Must supply freq for datetime value"
|
| 323 |
+
with pytest.raises(ValueError, match=msg):
|
| 324 |
+
Period(datetime.now())
|
| 325 |
+
with pytest.raises(ValueError, match=msg):
|
| 326 |
+
Period(datetime.now().date())
|
| 327 |
+
|
| 328 |
+
msg = "Value must be Period, string, integer, or datetime"
|
| 329 |
+
with pytest.raises(ValueError, match=msg):
|
| 330 |
+
Period(1.6, freq="D")
|
| 331 |
+
msg = "Ordinal must be an integer"
|
| 332 |
+
with pytest.raises(ValueError, match=msg):
|
| 333 |
+
Period(ordinal=1.6, freq="D")
|
| 334 |
+
msg = "Only value or ordinal but not both should be given but not both"
|
| 335 |
+
with pytest.raises(ValueError, match=msg):
|
| 336 |
+
Period(ordinal=2, value=1, freq="D")
|
| 337 |
+
|
| 338 |
+
msg = "If value is None, freq cannot be None"
|
| 339 |
+
with pytest.raises(ValueError, match=msg):
|
| 340 |
+
Period(month=1)
|
| 341 |
+
|
| 342 |
+
msg = '^Given date string "-2000" not likely a datetime$'
|
| 343 |
+
with pytest.raises(ValueError, match=msg):
|
| 344 |
+
Period("-2000", "Y")
|
| 345 |
+
msg = "day is out of range for month"
|
| 346 |
+
with pytest.raises(DateParseError, match=msg):
|
| 347 |
+
Period("0", "Y")
|
| 348 |
+
msg = "Unknown datetime string format, unable to parse"
|
| 349 |
+
with pytest.raises(DateParseError, match=msg):
|
| 350 |
+
Period("1/1/-2000", "Y")
|
| 351 |
+
|
| 352 |
+
def test_constructor_corner(self):
|
| 353 |
+
expected = Period("2007-01", freq="2M")
|
| 354 |
+
assert Period(year=2007, month=1, freq="2M") == expected
|
| 355 |
+
|
| 356 |
+
assert Period(None) is NaT
|
| 357 |
+
|
| 358 |
+
p = Period("2007-01-01", freq="D")
|
| 359 |
+
|
| 360 |
+
result = Period(p, freq="Y")
|
| 361 |
+
exp = Period("2007", freq="Y")
|
| 362 |
+
assert result == exp
|
| 363 |
+
|
| 364 |
+
def test_constructor_infer_freq(self):
|
| 365 |
+
p = Period("2007-01-01")
|
| 366 |
+
assert p.freq == "D"
|
| 367 |
+
|
| 368 |
+
p = Period("2007-01-01 07")
|
| 369 |
+
assert p.freq == "h"
|
| 370 |
+
|
| 371 |
+
p = Period("2007-01-01 07:10")
|
| 372 |
+
assert p.freq == "min"
|
| 373 |
+
|
| 374 |
+
p = Period("2007-01-01 07:10:15")
|
| 375 |
+
assert p.freq == "s"
|
| 376 |
+
|
| 377 |
+
p = Period("2007-01-01 07:10:15.123")
|
| 378 |
+
assert p.freq == "ms"
|
| 379 |
+
|
| 380 |
+
# We see that there are 6 digits after the decimal, so get microsecond
|
| 381 |
+
# even though they are all zeros.
|
| 382 |
+
p = Period("2007-01-01 07:10:15.123000")
|
| 383 |
+
assert p.freq == "us"
|
| 384 |
+
|
| 385 |
+
p = Period("2007-01-01 07:10:15.123400")
|
| 386 |
+
assert p.freq == "us"
|
| 387 |
+
|
| 388 |
+
def test_multiples(self):
|
| 389 |
+
result1 = Period("1989", freq="2Y")
|
| 390 |
+
result2 = Period("1989", freq="Y")
|
| 391 |
+
assert result1.ordinal == result2.ordinal
|
| 392 |
+
assert result1.freqstr == "2Y-DEC"
|
| 393 |
+
assert result2.freqstr == "Y-DEC"
|
| 394 |
+
assert result1.freq == offsets.YearEnd(2)
|
| 395 |
+
assert result2.freq == offsets.YearEnd()
|
| 396 |
+
|
| 397 |
+
assert (result1 + 1).ordinal == result1.ordinal + 2
|
| 398 |
+
assert (1 + result1).ordinal == result1.ordinal + 2
|
| 399 |
+
assert (result1 - 1).ordinal == result2.ordinal - 2
|
| 400 |
+
assert (-1 + result1).ordinal == result2.ordinal - 2
|
| 401 |
+
|
| 402 |
+
@pytest.mark.parametrize("month", MONTHS)
|
| 403 |
+
def test_period_cons_quarterly(self, month):
|
| 404 |
+
# bugs in scikits.timeseries
|
| 405 |
+
freq = f"Q-{month}"
|
| 406 |
+
exp = Period("1989Q3", freq=freq)
|
| 407 |
+
assert "1989Q3" in str(exp)
|
| 408 |
+
stamp = exp.to_timestamp("D", how="end")
|
| 409 |
+
p = Period(stamp, freq=freq)
|
| 410 |
+
assert p == exp
|
| 411 |
+
|
| 412 |
+
stamp = exp.to_timestamp("3D", how="end")
|
| 413 |
+
p = Period(stamp, freq=freq)
|
| 414 |
+
assert p == exp
|
| 415 |
+
|
| 416 |
+
@pytest.mark.parametrize("month", MONTHS)
|
| 417 |
+
def test_period_cons_annual(self, month):
|
| 418 |
+
# bugs in scikits.timeseries
|
| 419 |
+
freq = f"Y-{month}"
|
| 420 |
+
exp = Period("1989", freq=freq)
|
| 421 |
+
stamp = exp.to_timestamp("D", how="end") + timedelta(days=30)
|
| 422 |
+
p = Period(stamp, freq=freq)
|
| 423 |
+
|
| 424 |
+
assert p == exp + 1
|
| 425 |
+
assert isinstance(p, Period)
|
| 426 |
+
|
| 427 |
+
@pytest.mark.parametrize("day", DAYS)
|
| 428 |
+
@pytest.mark.parametrize("num", range(10, 17))
|
| 429 |
+
def test_period_cons_weekly(self, num, day):
|
| 430 |
+
daystr = f"2011-02-{num}"
|
| 431 |
+
freq = f"W-{day}"
|
| 432 |
+
|
| 433 |
+
result = Period(daystr, freq=freq)
|
| 434 |
+
expected = Period(daystr, freq="D").asfreq(freq)
|
| 435 |
+
assert result == expected
|
| 436 |
+
assert isinstance(result, Period)
|
| 437 |
+
|
| 438 |
+
def test_parse_week_str_roundstrip(self):
|
| 439 |
+
# GH#50803
|
| 440 |
+
per = Period("2017-01-23/2017-01-29")
|
| 441 |
+
assert per.freq.freqstr == "W-SUN"
|
| 442 |
+
|
| 443 |
+
per = Period("2017-01-24/2017-01-30")
|
| 444 |
+
assert per.freq.freqstr == "W-MON"
|
| 445 |
+
|
| 446 |
+
msg = "Could not parse as weekly-freq Period"
|
| 447 |
+
with pytest.raises(ValueError, match=msg):
|
| 448 |
+
# not 6 days apart
|
| 449 |
+
Period("2016-01-23/2017-01-29")
|
| 450 |
+
|
| 451 |
+
def test_period_from_ordinal(self):
|
| 452 |
+
p = Period("2011-01", freq="M")
|
| 453 |
+
res = Period._from_ordinal(p.ordinal, freq=p.freq)
|
| 454 |
+
assert p == res
|
| 455 |
+
assert isinstance(res, Period)
|
| 456 |
+
|
| 457 |
+
@pytest.mark.parametrize("freq", ["Y", "M", "D", "h"])
|
| 458 |
+
def test_construct_from_nat_string_and_freq(self, freq):
|
| 459 |
+
per = Period("NaT", freq=freq)
|
| 460 |
+
assert per is NaT
|
| 461 |
+
|
| 462 |
+
per = Period("NaT", freq="2" + freq)
|
| 463 |
+
assert per is NaT
|
| 464 |
+
|
| 465 |
+
per = Period("NaT", freq="3" + freq)
|
| 466 |
+
assert per is NaT
|
| 467 |
+
|
| 468 |
+
def test_period_cons_nat(self):
|
| 469 |
+
p = Period("nat", freq="W-SUN")
|
| 470 |
+
assert p is NaT
|
| 471 |
+
|
| 472 |
+
p = Period(iNaT, freq="D")
|
| 473 |
+
assert p is NaT
|
| 474 |
+
|
| 475 |
+
p = Period(iNaT, freq="3D")
|
| 476 |
+
assert p is NaT
|
| 477 |
+
|
| 478 |
+
p = Period(iNaT, freq="1D1h")
|
| 479 |
+
assert p is NaT
|
| 480 |
+
|
| 481 |
+
p = Period("NaT")
|
| 482 |
+
assert p is NaT
|
| 483 |
+
|
| 484 |
+
p = Period(iNaT)
|
| 485 |
+
assert p is NaT
|
| 486 |
+
|
| 487 |
+
def test_period_cons_mult(self):
|
| 488 |
+
p1 = Period("2011-01", freq="3M")
|
| 489 |
+
p2 = Period("2011-01", freq="M")
|
| 490 |
+
assert p1.ordinal == p2.ordinal
|
| 491 |
+
|
| 492 |
+
assert p1.freq == offsets.MonthEnd(3)
|
| 493 |
+
assert p1.freqstr == "3M"
|
| 494 |
+
|
| 495 |
+
assert p2.freq == offsets.MonthEnd()
|
| 496 |
+
assert p2.freqstr == "M"
|
| 497 |
+
|
| 498 |
+
result = p1 + 1
|
| 499 |
+
assert result.ordinal == (p2 + 3).ordinal
|
| 500 |
+
|
| 501 |
+
assert result.freq == p1.freq
|
| 502 |
+
assert result.freqstr == "3M"
|
| 503 |
+
|
| 504 |
+
result = p1 - 1
|
| 505 |
+
assert result.ordinal == (p2 - 3).ordinal
|
| 506 |
+
assert result.freq == p1.freq
|
| 507 |
+
assert result.freqstr == "3M"
|
| 508 |
+
|
| 509 |
+
msg = "Frequency must be positive, because it represents span: -3M"
|
| 510 |
+
with pytest.raises(ValueError, match=msg):
|
| 511 |
+
Period("2011-01", freq="-3M")
|
| 512 |
+
|
| 513 |
+
msg = "Frequency must be positive, because it represents span: 0M"
|
| 514 |
+
with pytest.raises(ValueError, match=msg):
|
| 515 |
+
Period("2011-01", freq="0M")
|
| 516 |
+
|
| 517 |
+
def test_period_cons_combined(self):
|
| 518 |
+
p = [
|
| 519 |
+
(
|
| 520 |
+
Period("2011-01", freq="1D1h"),
|
| 521 |
+
Period("2011-01", freq="1h1D"),
|
| 522 |
+
Period("2011-01", freq="h"),
|
| 523 |
+
),
|
| 524 |
+
(
|
| 525 |
+
Period(ordinal=1, freq="1D1h"),
|
| 526 |
+
Period(ordinal=1, freq="1h1D"),
|
| 527 |
+
Period(ordinal=1, freq="h"),
|
| 528 |
+
),
|
| 529 |
+
]
|
| 530 |
+
|
| 531 |
+
for p1, p2, p3 in p:
|
| 532 |
+
assert p1.ordinal == p3.ordinal
|
| 533 |
+
assert p2.ordinal == p3.ordinal
|
| 534 |
+
|
| 535 |
+
assert p1.freq == offsets.Hour(25)
|
| 536 |
+
assert p1.freqstr == "25h"
|
| 537 |
+
|
| 538 |
+
assert p2.freq == offsets.Hour(25)
|
| 539 |
+
assert p2.freqstr == "25h"
|
| 540 |
+
|
| 541 |
+
assert p3.freq == offsets.Hour()
|
| 542 |
+
assert p3.freqstr == "h"
|
| 543 |
+
|
| 544 |
+
result = p1 + 1
|
| 545 |
+
assert result.ordinal == (p3 + 25).ordinal
|
| 546 |
+
assert result.freq == p1.freq
|
| 547 |
+
assert result.freqstr == "25h"
|
| 548 |
+
|
| 549 |
+
result = p2 + 1
|
| 550 |
+
assert result.ordinal == (p3 + 25).ordinal
|
| 551 |
+
assert result.freq == p2.freq
|
| 552 |
+
assert result.freqstr == "25h"
|
| 553 |
+
|
| 554 |
+
result = p1 - 1
|
| 555 |
+
assert result.ordinal == (p3 - 25).ordinal
|
| 556 |
+
assert result.freq == p1.freq
|
| 557 |
+
assert result.freqstr == "25h"
|
| 558 |
+
|
| 559 |
+
result = p2 - 1
|
| 560 |
+
assert result.ordinal == (p3 - 25).ordinal
|
| 561 |
+
assert result.freq == p2.freq
|
| 562 |
+
assert result.freqstr == "25h"
|
| 563 |
+
|
| 564 |
+
msg = "Frequency must be positive, because it represents span: -25h"
|
| 565 |
+
with pytest.raises(ValueError, match=msg):
|
| 566 |
+
Period("2011-01", freq="-1D1h")
|
| 567 |
+
with pytest.raises(ValueError, match=msg):
|
| 568 |
+
Period("2011-01", freq="-1h1D")
|
| 569 |
+
with pytest.raises(ValueError, match=msg):
|
| 570 |
+
Period(ordinal=1, freq="-1D1h")
|
| 571 |
+
with pytest.raises(ValueError, match=msg):
|
| 572 |
+
Period(ordinal=1, freq="-1h1D")
|
| 573 |
+
|
| 574 |
+
msg = "Frequency must be positive, because it represents span: 0D"
|
| 575 |
+
with pytest.raises(ValueError, match=msg):
|
| 576 |
+
Period("2011-01", freq="0D0h")
|
| 577 |
+
with pytest.raises(ValueError, match=msg):
|
| 578 |
+
Period(ordinal=1, freq="0D0h")
|
| 579 |
+
|
| 580 |
+
# You can only combine together day and intraday offsets
|
| 581 |
+
msg = "Invalid frequency: 1W1D"
|
| 582 |
+
with pytest.raises(ValueError, match=msg):
|
| 583 |
+
Period("2011-01", freq="1W1D")
|
| 584 |
+
msg = "Invalid frequency: 1D1W"
|
| 585 |
+
with pytest.raises(ValueError, match=msg):
|
| 586 |
+
Period("2011-01", freq="1D1W")
|
| 587 |
+
|
| 588 |
+
@pytest.mark.parametrize("day", ["1970/01/01 ", "2020-12-31 ", "1981/09/13 "])
|
| 589 |
+
@pytest.mark.parametrize("hour", ["00:00:00", "00:00:01", "23:59:59", "12:00:59"])
|
| 590 |
+
@pytest.mark.parametrize(
|
| 591 |
+
"sec_float, expected",
|
| 592 |
+
[
|
| 593 |
+
(".000000001", 1),
|
| 594 |
+
(".000000999", 999),
|
| 595 |
+
(".123456789", 789),
|
| 596 |
+
(".999999999", 999),
|
| 597 |
+
(".999999000", 0),
|
| 598 |
+
# Test femtoseconds, attoseconds, picoseconds are dropped like Timestamp
|
| 599 |
+
(".999999001123", 1),
|
| 600 |
+
(".999999001123456", 1),
|
| 601 |
+
(".999999001123456789", 1),
|
| 602 |
+
],
|
| 603 |
+
)
|
| 604 |
+
def test_period_constructor_nanosecond(self, day, hour, sec_float, expected):
|
| 605 |
+
# GH 34621
|
| 606 |
+
|
| 607 |
+
assert Period(day + hour + sec_float).start_time.nanosecond == expected
|
| 608 |
+
|
| 609 |
+
@pytest.mark.parametrize("hour", range(24))
|
| 610 |
+
def test_period_large_ordinal(self, hour):
|
| 611 |
+
# Issue #36430
|
| 612 |
+
# Integer overflow for Period over the maximum timestamp
|
| 613 |
+
p = Period(ordinal=2562048 + hour, freq="1h")
|
| 614 |
+
assert p.hour == hour
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
class TestPeriodMethods:
|
| 618 |
+
def test_round_trip(self):
|
| 619 |
+
p = Period("2000Q1")
|
| 620 |
+
new_p = tm.round_trip_pickle(p)
|
| 621 |
+
assert new_p == p
|
| 622 |
+
|
| 623 |
+
def test_hash(self):
|
| 624 |
+
assert hash(Period("2011-01", freq="M")) == hash(Period("2011-01", freq="M"))
|
| 625 |
+
|
| 626 |
+
assert hash(Period("2011-01-01", freq="D")) != hash(Period("2011-01", freq="M"))
|
| 627 |
+
|
| 628 |
+
assert hash(Period("2011-01", freq="3M")) != hash(Period("2011-01", freq="2M"))
|
| 629 |
+
|
| 630 |
+
assert hash(Period("2011-01", freq="M")) != hash(Period("2011-02", freq="M"))
|
| 631 |
+
|
| 632 |
+
# --------------------------------------------------------------
|
| 633 |
+
# to_timestamp
|
| 634 |
+
|
| 635 |
+
def test_to_timestamp_mult(self):
|
| 636 |
+
p = Period("2011-01", freq="M")
|
| 637 |
+
assert p.to_timestamp(how="S") == Timestamp("2011-01-01")
|
| 638 |
+
expected = Timestamp("2011-02-01") - Timedelta(1, "ns")
|
| 639 |
+
assert p.to_timestamp(how="E") == expected
|
| 640 |
+
|
| 641 |
+
p = Period("2011-01", freq="3M")
|
| 642 |
+
assert p.to_timestamp(how="S") == Timestamp("2011-01-01")
|
| 643 |
+
expected = Timestamp("2011-04-01") - Timedelta(1, "ns")
|
| 644 |
+
assert p.to_timestamp(how="E") == expected
|
| 645 |
+
|
| 646 |
+
@pytest.mark.filterwarnings(
|
| 647 |
+
"ignore:Period with BDay freq is deprecated:FutureWarning"
|
| 648 |
+
)
|
| 649 |
+
def test_to_timestamp(self):
|
| 650 |
+
p = Period("1982", freq="Y")
|
| 651 |
+
start_ts = p.to_timestamp(how="S")
|
| 652 |
+
aliases = ["s", "StarT", "BEGIn"]
|
| 653 |
+
for a in aliases:
|
| 654 |
+
assert start_ts == p.to_timestamp("D", how=a)
|
| 655 |
+
# freq with mult should not affect to the result
|
| 656 |
+
assert start_ts == p.to_timestamp("3D", how=a)
|
| 657 |
+
|
| 658 |
+
end_ts = p.to_timestamp(how="E")
|
| 659 |
+
aliases = ["e", "end", "FINIsH"]
|
| 660 |
+
for a in aliases:
|
| 661 |
+
assert end_ts == p.to_timestamp("D", how=a)
|
| 662 |
+
assert end_ts == p.to_timestamp("3D", how=a)
|
| 663 |
+
|
| 664 |
+
from_lst = ["Y", "Q", "M", "W", "B", "D", "h", "Min", "s"]
|
| 665 |
+
|
| 666 |
+
def _ex(p):
|
| 667 |
+
if p.freq == "B":
|
| 668 |
+
return p.start_time + Timedelta(days=1, nanoseconds=-1)
|
| 669 |
+
return Timestamp((p + p.freq).start_time._value - 1)
|
| 670 |
+
|
| 671 |
+
for fcode in from_lst:
|
| 672 |
+
p = Period("1982", freq=fcode)
|
| 673 |
+
result = p.to_timestamp().to_period(fcode)
|
| 674 |
+
assert result == p
|
| 675 |
+
|
| 676 |
+
assert p.start_time == p.to_timestamp(how="S")
|
| 677 |
+
|
| 678 |
+
assert p.end_time == _ex(p)
|
| 679 |
+
|
| 680 |
+
# Frequency other than daily
|
| 681 |
+
|
| 682 |
+
p = Period("1985", freq="Y")
|
| 683 |
+
|
| 684 |
+
result = p.to_timestamp("h", how="end")
|
| 685 |
+
expected = Timestamp(1986, 1, 1) - Timedelta(1, "ns")
|
| 686 |
+
assert result == expected
|
| 687 |
+
result = p.to_timestamp("3h", how="end")
|
| 688 |
+
assert result == expected
|
| 689 |
+
|
| 690 |
+
result = p.to_timestamp("min", how="end")
|
| 691 |
+
expected = Timestamp(1986, 1, 1) - Timedelta(1, "ns")
|
| 692 |
+
assert result == expected
|
| 693 |
+
result = p.to_timestamp("2min", how="end")
|
| 694 |
+
assert result == expected
|
| 695 |
+
|
| 696 |
+
result = p.to_timestamp(how="end")
|
| 697 |
+
expected = Timestamp(1986, 1, 1) - Timedelta(1, "ns")
|
| 698 |
+
assert result == expected
|
| 699 |
+
|
| 700 |
+
expected = datetime(1985, 1, 1)
|
| 701 |
+
result = p.to_timestamp("h", how="start")
|
| 702 |
+
assert result == expected
|
| 703 |
+
result = p.to_timestamp("min", how="start")
|
| 704 |
+
assert result == expected
|
| 705 |
+
result = p.to_timestamp("s", how="start")
|
| 706 |
+
assert result == expected
|
| 707 |
+
result = p.to_timestamp("3h", how="start")
|
| 708 |
+
assert result == expected
|
| 709 |
+
result = p.to_timestamp("5s", how="start")
|
| 710 |
+
assert result == expected
|
| 711 |
+
|
| 712 |
+
def test_to_timestamp_business_end(self):
|
| 713 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 714 |
+
per = Period("1990-01-05", "B") # Friday
|
| 715 |
+
result = per.to_timestamp("B", how="E")
|
| 716 |
+
|
| 717 |
+
expected = Timestamp("1990-01-06") - Timedelta(nanoseconds=1)
|
| 718 |
+
assert result == expected
|
| 719 |
+
|
| 720 |
+
@pytest.mark.parametrize(
|
| 721 |
+
"ts, expected",
|
| 722 |
+
[
|
| 723 |
+
("1970-01-01 00:00:00", 0),
|
| 724 |
+
("1970-01-01 00:00:00.000001", 1),
|
| 725 |
+
("1970-01-01 00:00:00.00001", 10),
|
| 726 |
+
("1970-01-01 00:00:00.499", 499000),
|
| 727 |
+
("1999-12-31 23:59:59.999", 999000),
|
| 728 |
+
("1999-12-31 23:59:59.999999", 999999),
|
| 729 |
+
("2050-12-31 23:59:59.5", 500000),
|
| 730 |
+
("2050-12-31 23:59:59.500001", 500001),
|
| 731 |
+
("2050-12-31 23:59:59.123456", 123456),
|
| 732 |
+
],
|
| 733 |
+
)
|
| 734 |
+
@pytest.mark.parametrize("freq", [None, "us", "ns"])
|
| 735 |
+
def test_to_timestamp_microsecond(self, ts, expected, freq):
|
| 736 |
+
# GH 24444
|
| 737 |
+
result = Period(ts).to_timestamp(freq=freq).microsecond
|
| 738 |
+
assert result == expected
|
| 739 |
+
|
| 740 |
+
# --------------------------------------------------------------
|
| 741 |
+
# Rendering: __repr__, strftime, etc
|
| 742 |
+
|
| 743 |
+
@pytest.mark.parametrize(
|
| 744 |
+
"str_ts,freq,str_res,str_freq",
|
| 745 |
+
(
|
| 746 |
+
("Jan-2000", None, "2000-01", "M"),
|
| 747 |
+
("2000-12-15", None, "2000-12-15", "D"),
|
| 748 |
+
(
|
| 749 |
+
"2000-12-15 13:45:26.123456789",
|
| 750 |
+
"ns",
|
| 751 |
+
"2000-12-15 13:45:26.123456789",
|
| 752 |
+
"ns",
|
| 753 |
+
),
|
| 754 |
+
("2000-12-15 13:45:26.123456789", "us", "2000-12-15 13:45:26.123456", "us"),
|
| 755 |
+
("2000-12-15 13:45:26.123456", None, "2000-12-15 13:45:26.123456", "us"),
|
| 756 |
+
("2000-12-15 13:45:26.123456789", "ms", "2000-12-15 13:45:26.123", "ms"),
|
| 757 |
+
("2000-12-15 13:45:26.123", None, "2000-12-15 13:45:26.123", "ms"),
|
| 758 |
+
("2000-12-15 13:45:26", "s", "2000-12-15 13:45:26", "s"),
|
| 759 |
+
("2000-12-15 13:45:26", "min", "2000-12-15 13:45", "min"),
|
| 760 |
+
("2000-12-15 13:45:26", "h", "2000-12-15 13:00", "h"),
|
| 761 |
+
("2000-12-15", "Y", "2000", "Y-DEC"),
|
| 762 |
+
("2000-12-15", "Q", "2000Q4", "Q-DEC"),
|
| 763 |
+
("2000-12-15", "M", "2000-12", "M"),
|
| 764 |
+
("2000-12-15", "W", "2000-12-11/2000-12-17", "W-SUN"),
|
| 765 |
+
("2000-12-15", "D", "2000-12-15", "D"),
|
| 766 |
+
("2000-12-15", "B", "2000-12-15", "B"),
|
| 767 |
+
),
|
| 768 |
+
)
|
| 769 |
+
@pytest.mark.filterwarnings(
|
| 770 |
+
"ignore:Period with BDay freq is deprecated:FutureWarning"
|
| 771 |
+
)
|
| 772 |
+
def test_repr(self, str_ts, freq, str_res, str_freq):
|
| 773 |
+
p = Period(str_ts, freq=freq)
|
| 774 |
+
assert str(p) == str_res
|
| 775 |
+
assert repr(p) == f"Period('{str_res}', '{str_freq}')"
|
| 776 |
+
|
| 777 |
+
def test_repr_nat(self):
|
| 778 |
+
p = Period("nat", freq="M")
|
| 779 |
+
assert repr(NaT) in repr(p)
|
| 780 |
+
|
| 781 |
+
def test_strftime(self):
|
| 782 |
+
# GH#3363
|
| 783 |
+
p = Period("2000-1-1 12:34:12", freq="s")
|
| 784 |
+
res = p.strftime("%Y-%m-%d %H:%M:%S")
|
| 785 |
+
assert res == "2000-01-01 12:34:12"
|
| 786 |
+
assert isinstance(res, str)
|
| 787 |
+
|
| 788 |
+
|
| 789 |
+
class TestPeriodProperties:
|
| 790 |
+
"""Test properties such as year, month, weekday, etc...."""
|
| 791 |
+
|
| 792 |
+
@pytest.mark.parametrize("freq", ["Y", "M", "D", "h"])
|
| 793 |
+
def test_is_leap_year(self, freq):
|
| 794 |
+
# GH 13727
|
| 795 |
+
p = Period("2000-01-01 00:00:00", freq=freq)
|
| 796 |
+
assert p.is_leap_year
|
| 797 |
+
assert isinstance(p.is_leap_year, bool)
|
| 798 |
+
|
| 799 |
+
p = Period("1999-01-01 00:00:00", freq=freq)
|
| 800 |
+
assert not p.is_leap_year
|
| 801 |
+
|
| 802 |
+
p = Period("2004-01-01 00:00:00", freq=freq)
|
| 803 |
+
assert p.is_leap_year
|
| 804 |
+
|
| 805 |
+
p = Period("2100-01-01 00:00:00", freq=freq)
|
| 806 |
+
assert not p.is_leap_year
|
| 807 |
+
|
| 808 |
+
def test_quarterly_negative_ordinals(self):
|
| 809 |
+
p = Period(ordinal=-1, freq="Q-DEC")
|
| 810 |
+
assert p.year == 1969
|
| 811 |
+
assert p.quarter == 4
|
| 812 |
+
assert isinstance(p, Period)
|
| 813 |
+
|
| 814 |
+
p = Period(ordinal=-2, freq="Q-DEC")
|
| 815 |
+
assert p.year == 1969
|
| 816 |
+
assert p.quarter == 3
|
| 817 |
+
assert isinstance(p, Period)
|
| 818 |
+
|
| 819 |
+
p = Period(ordinal=-2, freq="M")
|
| 820 |
+
assert p.year == 1969
|
| 821 |
+
assert p.month == 11
|
| 822 |
+
assert isinstance(p, Period)
|
| 823 |
+
|
| 824 |
+
def test_freq_str(self):
|
| 825 |
+
i1 = Period("1982", freq="Min")
|
| 826 |
+
assert i1.freq == offsets.Minute()
|
| 827 |
+
assert i1.freqstr == "min"
|
| 828 |
+
|
| 829 |
+
@pytest.mark.filterwarnings(
|
| 830 |
+
"ignore:Period with BDay freq is deprecated:FutureWarning"
|
| 831 |
+
)
|
| 832 |
+
def test_period_deprecated_freq(self):
|
| 833 |
+
cases = {
|
| 834 |
+
"M": ["MTH", "MONTH", "MONTHLY", "Mth", "month", "monthly"],
|
| 835 |
+
"B": ["BUS", "BUSINESS", "BUSINESSLY", "WEEKDAY", "bus"],
|
| 836 |
+
"D": ["DAY", "DLY", "DAILY", "Day", "Dly", "Daily"],
|
| 837 |
+
"h": ["HR", "HOUR", "HRLY", "HOURLY", "hr", "Hour", "HRly"],
|
| 838 |
+
"min": ["minute", "MINUTE", "MINUTELY", "minutely"],
|
| 839 |
+
"s": ["sec", "SEC", "SECOND", "SECONDLY", "second"],
|
| 840 |
+
"ms": ["MILLISECOND", "MILLISECONDLY", "millisecond"],
|
| 841 |
+
"us": ["MICROSECOND", "MICROSECONDLY", "microsecond"],
|
| 842 |
+
"ns": ["NANOSECOND", "NANOSECONDLY", "nanosecond"],
|
| 843 |
+
}
|
| 844 |
+
|
| 845 |
+
msg = INVALID_FREQ_ERR_MSG
|
| 846 |
+
for exp, freqs in cases.items():
|
| 847 |
+
for freq in freqs:
|
| 848 |
+
with pytest.raises(ValueError, match=msg):
|
| 849 |
+
Period("2016-03-01 09:00", freq=freq)
|
| 850 |
+
with pytest.raises(ValueError, match=msg):
|
| 851 |
+
Period(ordinal=1, freq=freq)
|
| 852 |
+
|
| 853 |
+
# check supported freq-aliases still works
|
| 854 |
+
p1 = Period("2016-03-01 09:00", freq=exp)
|
| 855 |
+
p2 = Period(ordinal=1, freq=exp)
|
| 856 |
+
assert isinstance(p1, Period)
|
| 857 |
+
assert isinstance(p2, Period)
|
| 858 |
+
|
| 859 |
+
@staticmethod
|
| 860 |
+
def _period_constructor(bound, offset):
|
| 861 |
+
return Period(
|
| 862 |
+
year=bound.year,
|
| 863 |
+
month=bound.month,
|
| 864 |
+
day=bound.day,
|
| 865 |
+
hour=bound.hour,
|
| 866 |
+
minute=bound.minute,
|
| 867 |
+
second=bound.second + offset,
|
| 868 |
+
freq="us",
|
| 869 |
+
)
|
| 870 |
+
|
| 871 |
+
@pytest.mark.parametrize("bound, offset", [(Timestamp.min, -1), (Timestamp.max, 1)])
|
| 872 |
+
@pytest.mark.parametrize("period_property", ["start_time", "end_time"])
|
| 873 |
+
def test_outer_bounds_start_and_end_time(self, bound, offset, period_property):
|
| 874 |
+
# GH #13346
|
| 875 |
+
period = TestPeriodProperties._period_constructor(bound, offset)
|
| 876 |
+
with pytest.raises(OutOfBoundsDatetime, match="Out of bounds nanosecond"):
|
| 877 |
+
getattr(period, period_property)
|
| 878 |
+
|
| 879 |
+
@pytest.mark.parametrize("bound, offset", [(Timestamp.min, -1), (Timestamp.max, 1)])
|
| 880 |
+
@pytest.mark.parametrize("period_property", ["start_time", "end_time"])
|
| 881 |
+
def test_inner_bounds_start_and_end_time(self, bound, offset, period_property):
|
| 882 |
+
# GH #13346
|
| 883 |
+
period = TestPeriodProperties._period_constructor(bound, -offset)
|
| 884 |
+
expected = period.to_timestamp().round(freq="s")
|
| 885 |
+
assert getattr(period, period_property).round(freq="s") == expected
|
| 886 |
+
expected = (bound - offset * Timedelta(1, unit="s")).floor("s")
|
| 887 |
+
assert getattr(period, period_property).floor("s") == expected
|
| 888 |
+
|
| 889 |
+
def test_start_time(self):
|
| 890 |
+
freq_lst = ["Y", "Q", "M", "D", "h", "min", "s"]
|
| 891 |
+
xp = datetime(2012, 1, 1)
|
| 892 |
+
for f in freq_lst:
|
| 893 |
+
p = Period("2012", freq=f)
|
| 894 |
+
assert p.start_time == xp
|
| 895 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 896 |
+
assert Period("2012", freq="B").start_time == datetime(2012, 1, 2)
|
| 897 |
+
assert Period("2012", freq="W").start_time == datetime(2011, 12, 26)
|
| 898 |
+
|
| 899 |
+
def test_end_time(self):
|
| 900 |
+
p = Period("2012", freq="Y")
|
| 901 |
+
|
| 902 |
+
def _ex(*args):
|
| 903 |
+
return Timestamp(Timestamp(datetime(*args)).as_unit("ns")._value - 1)
|
| 904 |
+
|
| 905 |
+
xp = _ex(2013, 1, 1)
|
| 906 |
+
assert xp == p.end_time
|
| 907 |
+
|
| 908 |
+
p = Period("2012", freq="Q")
|
| 909 |
+
xp = _ex(2012, 4, 1)
|
| 910 |
+
assert xp == p.end_time
|
| 911 |
+
|
| 912 |
+
p = Period("2012", freq="M")
|
| 913 |
+
xp = _ex(2012, 2, 1)
|
| 914 |
+
assert xp == p.end_time
|
| 915 |
+
|
| 916 |
+
p = Period("2012", freq="D")
|
| 917 |
+
xp = _ex(2012, 1, 2)
|
| 918 |
+
assert xp == p.end_time
|
| 919 |
+
|
| 920 |
+
p = Period("2012", freq="h")
|
| 921 |
+
xp = _ex(2012, 1, 1, 1)
|
| 922 |
+
assert xp == p.end_time
|
| 923 |
+
|
| 924 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 925 |
+
p = Period("2012", freq="B")
|
| 926 |
+
xp = _ex(2012, 1, 3)
|
| 927 |
+
assert xp == p.end_time
|
| 928 |
+
|
| 929 |
+
p = Period("2012", freq="W")
|
| 930 |
+
xp = _ex(2012, 1, 2)
|
| 931 |
+
assert xp == p.end_time
|
| 932 |
+
|
| 933 |
+
# Test for GH 11738
|
| 934 |
+
p = Period("2012", freq="15D")
|
| 935 |
+
xp = _ex(2012, 1, 16)
|
| 936 |
+
assert xp == p.end_time
|
| 937 |
+
|
| 938 |
+
p = Period("2012", freq="1D1h")
|
| 939 |
+
xp = _ex(2012, 1, 2, 1)
|
| 940 |
+
assert xp == p.end_time
|
| 941 |
+
|
| 942 |
+
p = Period("2012", freq="1h1D")
|
| 943 |
+
xp = _ex(2012, 1, 2, 1)
|
| 944 |
+
assert xp == p.end_time
|
| 945 |
+
|
| 946 |
+
def test_end_time_business_friday(self):
|
| 947 |
+
# GH#34449
|
| 948 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 949 |
+
per = Period("1990-01-05", "B")
|
| 950 |
+
result = per.end_time
|
| 951 |
+
|
| 952 |
+
expected = Timestamp("1990-01-06") - Timedelta(nanoseconds=1)
|
| 953 |
+
assert result == expected
|
| 954 |
+
|
| 955 |
+
def test_anchor_week_end_time(self):
|
| 956 |
+
def _ex(*args):
|
| 957 |
+
return Timestamp(Timestamp(datetime(*args)).as_unit("ns")._value - 1)
|
| 958 |
+
|
| 959 |
+
p = Period("2013-1-1", "W-SAT")
|
| 960 |
+
xp = _ex(2013, 1, 6)
|
| 961 |
+
assert p.end_time == xp
|
| 962 |
+
|
| 963 |
+
def test_properties_annually(self):
|
| 964 |
+
# Test properties on Periods with annually frequency.
|
| 965 |
+
a_date = Period(freq="Y", year=2007)
|
| 966 |
+
assert a_date.year == 2007
|
| 967 |
+
|
| 968 |
+
def test_properties_quarterly(self):
|
| 969 |
+
# Test properties on Periods with daily frequency.
|
| 970 |
+
qedec_date = Period(freq="Q-DEC", year=2007, quarter=1)
|
| 971 |
+
qejan_date = Period(freq="Q-JAN", year=2007, quarter=1)
|
| 972 |
+
qejun_date = Period(freq="Q-JUN", year=2007, quarter=1)
|
| 973 |
+
#
|
| 974 |
+
for x in range(3):
|
| 975 |
+
for qd in (qedec_date, qejan_date, qejun_date):
|
| 976 |
+
assert (qd + x).qyear == 2007
|
| 977 |
+
assert (qd + x).quarter == x + 1
|
| 978 |
+
|
| 979 |
+
def test_properties_monthly(self):
|
| 980 |
+
# Test properties on Periods with daily frequency.
|
| 981 |
+
m_date = Period(freq="M", year=2007, month=1)
|
| 982 |
+
for x in range(11):
|
| 983 |
+
m_ival_x = m_date + x
|
| 984 |
+
assert m_ival_x.year == 2007
|
| 985 |
+
if 1 <= x + 1 <= 3:
|
| 986 |
+
assert m_ival_x.quarter == 1
|
| 987 |
+
elif 4 <= x + 1 <= 6:
|
| 988 |
+
assert m_ival_x.quarter == 2
|
| 989 |
+
elif 7 <= x + 1 <= 9:
|
| 990 |
+
assert m_ival_x.quarter == 3
|
| 991 |
+
elif 10 <= x + 1 <= 12:
|
| 992 |
+
assert m_ival_x.quarter == 4
|
| 993 |
+
assert m_ival_x.month == x + 1
|
| 994 |
+
|
| 995 |
+
def test_properties_weekly(self):
|
| 996 |
+
# Test properties on Periods with daily frequency.
|
| 997 |
+
w_date = Period(freq="W", year=2007, month=1, day=7)
|
| 998 |
+
#
|
| 999 |
+
assert w_date.year == 2007
|
| 1000 |
+
assert w_date.quarter == 1
|
| 1001 |
+
assert w_date.month == 1
|
| 1002 |
+
assert w_date.week == 1
|
| 1003 |
+
assert (w_date - 1).week == 52
|
| 1004 |
+
assert w_date.days_in_month == 31
|
| 1005 |
+
assert Period(freq="W", year=2012, month=2, day=1).days_in_month == 29
|
| 1006 |
+
|
| 1007 |
+
def test_properties_weekly_legacy(self):
|
| 1008 |
+
# Test properties on Periods with daily frequency.
|
| 1009 |
+
w_date = Period(freq="W", year=2007, month=1, day=7)
|
| 1010 |
+
assert w_date.year == 2007
|
| 1011 |
+
assert w_date.quarter == 1
|
| 1012 |
+
assert w_date.month == 1
|
| 1013 |
+
assert w_date.week == 1
|
| 1014 |
+
assert (w_date - 1).week == 52
|
| 1015 |
+
assert w_date.days_in_month == 31
|
| 1016 |
+
|
| 1017 |
+
exp = Period(freq="W", year=2012, month=2, day=1)
|
| 1018 |
+
assert exp.days_in_month == 29
|
| 1019 |
+
|
| 1020 |
+
msg = INVALID_FREQ_ERR_MSG
|
| 1021 |
+
with pytest.raises(ValueError, match=msg):
|
| 1022 |
+
Period(freq="WK", year=2007, month=1, day=7)
|
| 1023 |
+
|
| 1024 |
+
def test_properties_daily(self):
|
| 1025 |
+
# Test properties on Periods with daily frequency.
|
| 1026 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 1027 |
+
b_date = Period(freq="B", year=2007, month=1, day=1)
|
| 1028 |
+
#
|
| 1029 |
+
assert b_date.year == 2007
|
| 1030 |
+
assert b_date.quarter == 1
|
| 1031 |
+
assert b_date.month == 1
|
| 1032 |
+
assert b_date.day == 1
|
| 1033 |
+
assert b_date.weekday == 0
|
| 1034 |
+
assert b_date.dayofyear == 1
|
| 1035 |
+
assert b_date.days_in_month == 31
|
| 1036 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 1037 |
+
assert Period(freq="B", year=2012, month=2, day=1).days_in_month == 29
|
| 1038 |
+
|
| 1039 |
+
d_date = Period(freq="D", year=2007, month=1, day=1)
|
| 1040 |
+
|
| 1041 |
+
assert d_date.year == 2007
|
| 1042 |
+
assert d_date.quarter == 1
|
| 1043 |
+
assert d_date.month == 1
|
| 1044 |
+
assert d_date.day == 1
|
| 1045 |
+
assert d_date.weekday == 0
|
| 1046 |
+
assert d_date.dayofyear == 1
|
| 1047 |
+
assert d_date.days_in_month == 31
|
| 1048 |
+
assert Period(freq="D", year=2012, month=2, day=1).days_in_month == 29
|
| 1049 |
+
|
| 1050 |
+
def test_properties_hourly(self):
|
| 1051 |
+
# Test properties on Periods with hourly frequency.
|
| 1052 |
+
h_date1 = Period(freq="h", year=2007, month=1, day=1, hour=0)
|
| 1053 |
+
h_date2 = Period(freq="2h", year=2007, month=1, day=1, hour=0)
|
| 1054 |
+
|
| 1055 |
+
for h_date in [h_date1, h_date2]:
|
| 1056 |
+
assert h_date.year == 2007
|
| 1057 |
+
assert h_date.quarter == 1
|
| 1058 |
+
assert h_date.month == 1
|
| 1059 |
+
assert h_date.day == 1
|
| 1060 |
+
assert h_date.weekday == 0
|
| 1061 |
+
assert h_date.dayofyear == 1
|
| 1062 |
+
assert h_date.hour == 0
|
| 1063 |
+
assert h_date.days_in_month == 31
|
| 1064 |
+
assert (
|
| 1065 |
+
Period(freq="h", year=2012, month=2, day=1, hour=0).days_in_month == 29
|
| 1066 |
+
)
|
| 1067 |
+
|
| 1068 |
+
def test_properties_minutely(self):
|
| 1069 |
+
# Test properties on Periods with minutely frequency.
|
| 1070 |
+
t_date = Period(freq="Min", year=2007, month=1, day=1, hour=0, minute=0)
|
| 1071 |
+
#
|
| 1072 |
+
assert t_date.quarter == 1
|
| 1073 |
+
assert t_date.month == 1
|
| 1074 |
+
assert t_date.day == 1
|
| 1075 |
+
assert t_date.weekday == 0
|
| 1076 |
+
assert t_date.dayofyear == 1
|
| 1077 |
+
assert t_date.hour == 0
|
| 1078 |
+
assert t_date.minute == 0
|
| 1079 |
+
assert t_date.days_in_month == 31
|
| 1080 |
+
assert (
|
| 1081 |
+
Period(freq="D", year=2012, month=2, day=1, hour=0, minute=0).days_in_month
|
| 1082 |
+
== 29
|
| 1083 |
+
)
|
| 1084 |
+
|
| 1085 |
+
def test_properties_secondly(self):
|
| 1086 |
+
# Test properties on Periods with secondly frequency.
|
| 1087 |
+
s_date = Period(
|
| 1088 |
+
freq="Min", year=2007, month=1, day=1, hour=0, minute=0, second=0
|
| 1089 |
+
)
|
| 1090 |
+
#
|
| 1091 |
+
assert s_date.year == 2007
|
| 1092 |
+
assert s_date.quarter == 1
|
| 1093 |
+
assert s_date.month == 1
|
| 1094 |
+
assert s_date.day == 1
|
| 1095 |
+
assert s_date.weekday == 0
|
| 1096 |
+
assert s_date.dayofyear == 1
|
| 1097 |
+
assert s_date.hour == 0
|
| 1098 |
+
assert s_date.minute == 0
|
| 1099 |
+
assert s_date.second == 0
|
| 1100 |
+
assert s_date.days_in_month == 31
|
| 1101 |
+
assert (
|
| 1102 |
+
Period(
|
| 1103 |
+
freq="Min", year=2012, month=2, day=1, hour=0, minute=0, second=0
|
| 1104 |
+
).days_in_month
|
| 1105 |
+
== 29
|
| 1106 |
+
)
|
| 1107 |
+
|
| 1108 |
+
|
| 1109 |
+
class TestPeriodComparisons:
|
| 1110 |
+
def test_sort_periods(self):
|
| 1111 |
+
jan = Period("2000-01", "M")
|
| 1112 |
+
feb = Period("2000-02", "M")
|
| 1113 |
+
mar = Period("2000-03", "M")
|
| 1114 |
+
periods = [mar, jan, feb]
|
| 1115 |
+
correctPeriods = [jan, feb, mar]
|
| 1116 |
+
assert sorted(periods) == correctPeriods
|
| 1117 |
+
|
| 1118 |
+
|
| 1119 |
+
def test_period_immutable():
|
| 1120 |
+
# see gh-17116
|
| 1121 |
+
msg = "not writable"
|
| 1122 |
+
|
| 1123 |
+
per = Period("2014Q1")
|
| 1124 |
+
with pytest.raises(AttributeError, match=msg):
|
| 1125 |
+
per.ordinal = 14
|
| 1126 |
+
|
| 1127 |
+
freq = per.freq
|
| 1128 |
+
with pytest.raises(AttributeError, match=msg):
|
| 1129 |
+
per.freq = 2 * freq
|
| 1130 |
+
|
| 1131 |
+
|
| 1132 |
+
def test_small_year_parsing():
|
| 1133 |
+
per1 = Period("0001-01-07", "D")
|
| 1134 |
+
assert per1.year == 1
|
| 1135 |
+
assert per1.day == 7
|
| 1136 |
+
|
| 1137 |
+
|
| 1138 |
+
def test_negone_ordinals():
|
| 1139 |
+
freqs = ["Y", "M", "Q", "D", "h", "min", "s"]
|
| 1140 |
+
|
| 1141 |
+
period = Period(ordinal=-1, freq="D")
|
| 1142 |
+
for freq in freqs:
|
| 1143 |
+
repr(period.asfreq(freq))
|
| 1144 |
+
|
| 1145 |
+
for freq in freqs:
|
| 1146 |
+
period = Period(ordinal=-1, freq=freq)
|
| 1147 |
+
repr(period)
|
| 1148 |
+
assert period.year == 1969
|
| 1149 |
+
|
| 1150 |
+
with tm.assert_produces_warning(FutureWarning, match=bday_msg):
|
| 1151 |
+
period = Period(ordinal=-1, freq="B")
|
| 1152 |
+
repr(period)
|
| 1153 |
+
period = Period(ordinal=-1, freq="W")
|
| 1154 |
+
repr(period)
|
llava_next/lib/python3.10/site-packages/pandas/tests/scalar/timedelta/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (185 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__init__.py
ADDED
|
File without changes
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (188 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_freq_code.cpython-310.pyc
ADDED
|
Binary file (1.91 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_frequencies.cpython-310.pyc
ADDED
|
Binary file (806 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_inference.cpython-310.pyc
ADDED
|
Binary file (13.8 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_freq_code.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
from pandas._libs.tslibs import (
|
| 5 |
+
Period,
|
| 6 |
+
to_offset,
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@pytest.mark.parametrize(
|
| 11 |
+
"freqstr,exp_freqstr",
|
| 12 |
+
[("D", "D"), ("W", "D"), ("ME", "D"), ("s", "s"), ("min", "s"), ("h", "s")],
|
| 13 |
+
)
|
| 14 |
+
def test_get_to_timestamp_base(freqstr, exp_freqstr):
|
| 15 |
+
off = to_offset(freqstr)
|
| 16 |
+
per = Period._from_ordinal(1, off)
|
| 17 |
+
exp_code = to_offset(exp_freqstr)._period_dtype_code
|
| 18 |
+
|
| 19 |
+
result_code = per._dtype._get_to_timestamp_base()
|
| 20 |
+
assert result_code == exp_code
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@pytest.mark.parametrize(
|
| 24 |
+
"args,expected",
|
| 25 |
+
[
|
| 26 |
+
((1.5, "min"), (90, "s")),
|
| 27 |
+
((62.4, "min"), (3744, "s")),
|
| 28 |
+
((1.04, "h"), (3744, "s")),
|
| 29 |
+
((1, "D"), (1, "D")),
|
| 30 |
+
((0.342931, "h"), (1234551600, "us")),
|
| 31 |
+
((1.2345, "D"), (106660800, "ms")),
|
| 32 |
+
],
|
| 33 |
+
)
|
| 34 |
+
def test_resolution_bumping(args, expected):
|
| 35 |
+
# see gh-14378
|
| 36 |
+
off = to_offset(str(args[0]) + args[1])
|
| 37 |
+
assert off.n == expected[0]
|
| 38 |
+
assert off._prefix == expected[1]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@pytest.mark.parametrize(
|
| 42 |
+
"args",
|
| 43 |
+
[
|
| 44 |
+
(0.5, "ns"),
|
| 45 |
+
# Too much precision in the input can prevent.
|
| 46 |
+
(0.3429324798798269273987982, "h"),
|
| 47 |
+
],
|
| 48 |
+
)
|
| 49 |
+
def test_cat(args):
|
| 50 |
+
msg = "Invalid frequency"
|
| 51 |
+
|
| 52 |
+
with pytest.raises(ValueError, match=msg):
|
| 53 |
+
to_offset(str(args[0]) + args[1])
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@pytest.mark.parametrize(
|
| 57 |
+
"freqstr,expected",
|
| 58 |
+
[
|
| 59 |
+
("1h", "2021-01-01T09:00:00"),
|
| 60 |
+
("1D", "2021-01-02T08:00:00"),
|
| 61 |
+
("1W", "2021-01-03T08:00:00"),
|
| 62 |
+
("1ME", "2021-01-31T08:00:00"),
|
| 63 |
+
("1YE", "2021-12-31T08:00:00"),
|
| 64 |
+
],
|
| 65 |
+
)
|
| 66 |
+
def test_compatibility(freqstr, expected):
|
| 67 |
+
ts_np = np.datetime64("2021-01-01T08:00:00.00")
|
| 68 |
+
do = to_offset(freqstr)
|
| 69 |
+
assert ts_np + do == np.datetime64(expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_frequencies.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from pandas._libs.tslibs import offsets
|
| 4 |
+
|
| 5 |
+
from pandas.tseries.frequencies import (
|
| 6 |
+
is_subperiod,
|
| 7 |
+
is_superperiod,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@pytest.mark.parametrize(
|
| 12 |
+
"p1,p2,expected",
|
| 13 |
+
[
|
| 14 |
+
# Input validation.
|
| 15 |
+
(offsets.MonthEnd(), None, False),
|
| 16 |
+
(offsets.YearEnd(), None, False),
|
| 17 |
+
(None, offsets.YearEnd(), False),
|
| 18 |
+
(None, offsets.MonthEnd(), False),
|
| 19 |
+
(None, None, False),
|
| 20 |
+
(offsets.YearEnd(), offsets.MonthEnd(), True),
|
| 21 |
+
(offsets.Hour(), offsets.Minute(), True),
|
| 22 |
+
(offsets.Second(), offsets.Milli(), True),
|
| 23 |
+
(offsets.Milli(), offsets.Micro(), True),
|
| 24 |
+
(offsets.Micro(), offsets.Nano(), True),
|
| 25 |
+
],
|
| 26 |
+
)
|
| 27 |
+
def test_super_sub_symmetry(p1, p2, expected):
|
| 28 |
+
assert is_superperiod(p1, p2) is expected
|
| 29 |
+
assert is_subperiod(p2, p1) is expected
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_inference.py
ADDED
|
@@ -0,0 +1,558 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import (
|
| 2 |
+
datetime,
|
| 3 |
+
timedelta,
|
| 4 |
+
)
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from pandas._libs.tslibs.ccalendar import (
|
| 10 |
+
DAYS,
|
| 11 |
+
MONTHS,
|
| 12 |
+
)
|
| 13 |
+
from pandas._libs.tslibs.offsets import _get_offset
|
| 14 |
+
from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
|
| 15 |
+
from pandas.compat import is_platform_windows
|
| 16 |
+
|
| 17 |
+
from pandas import (
|
| 18 |
+
DatetimeIndex,
|
| 19 |
+
Index,
|
| 20 |
+
RangeIndex,
|
| 21 |
+
Series,
|
| 22 |
+
Timestamp,
|
| 23 |
+
date_range,
|
| 24 |
+
period_range,
|
| 25 |
+
)
|
| 26 |
+
import pandas._testing as tm
|
| 27 |
+
from pandas.core.arrays import (
|
| 28 |
+
DatetimeArray,
|
| 29 |
+
TimedeltaArray,
|
| 30 |
+
)
|
| 31 |
+
from pandas.core.tools.datetimes import to_datetime
|
| 32 |
+
|
| 33 |
+
from pandas.tseries import (
|
| 34 |
+
frequencies,
|
| 35 |
+
offsets,
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@pytest.fixture(
|
| 40 |
+
params=[
|
| 41 |
+
(timedelta(1), "D"),
|
| 42 |
+
(timedelta(hours=1), "h"),
|
| 43 |
+
(timedelta(minutes=1), "min"),
|
| 44 |
+
(timedelta(seconds=1), "s"),
|
| 45 |
+
(np.timedelta64(1, "ns"), "ns"),
|
| 46 |
+
(timedelta(microseconds=1), "us"),
|
| 47 |
+
(timedelta(microseconds=1000), "ms"),
|
| 48 |
+
]
|
| 49 |
+
)
|
| 50 |
+
def base_delta_code_pair(request):
|
| 51 |
+
return request.param
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
freqs = (
|
| 55 |
+
[f"QE-{month}" for month in MONTHS]
|
| 56 |
+
+ [f"{annual}-{month}" for annual in ["YE", "BYE"] for month in MONTHS]
|
| 57 |
+
+ ["ME", "BME", "BMS"]
|
| 58 |
+
+ [f"WOM-{count}{day}" for count in range(1, 5) for day in DAYS]
|
| 59 |
+
+ [f"W-{day}" for day in DAYS]
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
@pytest.mark.parametrize("freq", freqs)
|
| 64 |
+
@pytest.mark.parametrize("periods", [5, 7])
|
| 65 |
+
def test_infer_freq_range(periods, freq):
|
| 66 |
+
freq = freq.upper()
|
| 67 |
+
|
| 68 |
+
gen = date_range("1/1/2000", periods=periods, freq=freq)
|
| 69 |
+
index = DatetimeIndex(gen.values)
|
| 70 |
+
|
| 71 |
+
if not freq.startswith("QE-"):
|
| 72 |
+
assert frequencies.infer_freq(index) == gen.freqstr
|
| 73 |
+
else:
|
| 74 |
+
inf_freq = frequencies.infer_freq(index)
|
| 75 |
+
is_dec_range = inf_freq == "QE-DEC" and gen.freqstr in (
|
| 76 |
+
"QE",
|
| 77 |
+
"QE-DEC",
|
| 78 |
+
"QE-SEP",
|
| 79 |
+
"QE-JUN",
|
| 80 |
+
"QE-MAR",
|
| 81 |
+
)
|
| 82 |
+
is_nov_range = inf_freq == "QE-NOV" and gen.freqstr in (
|
| 83 |
+
"QE-NOV",
|
| 84 |
+
"QE-AUG",
|
| 85 |
+
"QE-MAY",
|
| 86 |
+
"QE-FEB",
|
| 87 |
+
)
|
| 88 |
+
is_oct_range = inf_freq == "QE-OCT" and gen.freqstr in (
|
| 89 |
+
"QE-OCT",
|
| 90 |
+
"QE-JUL",
|
| 91 |
+
"QE-APR",
|
| 92 |
+
"QE-JAN",
|
| 93 |
+
)
|
| 94 |
+
assert is_dec_range or is_nov_range or is_oct_range
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def test_raise_if_period_index():
|
| 98 |
+
index = period_range(start="1/1/1990", periods=20, freq="M")
|
| 99 |
+
msg = "Check the `freq` attribute instead of using infer_freq"
|
| 100 |
+
|
| 101 |
+
with pytest.raises(TypeError, match=msg):
|
| 102 |
+
frequencies.infer_freq(index)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def test_raise_if_too_few():
|
| 106 |
+
index = DatetimeIndex(["12/31/1998", "1/3/1999"])
|
| 107 |
+
msg = "Need at least 3 dates to infer frequency"
|
| 108 |
+
|
| 109 |
+
with pytest.raises(ValueError, match=msg):
|
| 110 |
+
frequencies.infer_freq(index)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def test_business_daily():
|
| 114 |
+
index = DatetimeIndex(["01/01/1999", "1/4/1999", "1/5/1999"])
|
| 115 |
+
assert frequencies.infer_freq(index) == "B"
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def test_business_daily_look_alike():
|
| 119 |
+
# see gh-16624
|
| 120 |
+
#
|
| 121 |
+
# Do not infer "B when "weekend" (2-day gap) in wrong place.
|
| 122 |
+
index = DatetimeIndex(["12/31/1998", "1/3/1999", "1/4/1999"])
|
| 123 |
+
assert frequencies.infer_freq(index) is None
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def test_day_corner():
|
| 127 |
+
index = DatetimeIndex(["1/1/2000", "1/2/2000", "1/3/2000"])
|
| 128 |
+
assert frequencies.infer_freq(index) == "D"
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def test_non_datetime_index():
|
| 132 |
+
dates = to_datetime(["1/1/2000", "1/2/2000", "1/3/2000"])
|
| 133 |
+
assert frequencies.infer_freq(dates) == "D"
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def test_fifth_week_of_month_infer():
|
| 137 |
+
# see gh-9425
|
| 138 |
+
#
|
| 139 |
+
# Only attempt to infer up to WOM-4.
|
| 140 |
+
index = DatetimeIndex(["2014-03-31", "2014-06-30", "2015-03-30"])
|
| 141 |
+
assert frequencies.infer_freq(index) is None
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def test_week_of_month_fake():
|
| 145 |
+
# All of these dates are on same day
|
| 146 |
+
# of week and are 4 or 5 weeks apart.
|
| 147 |
+
index = DatetimeIndex(["2013-08-27", "2013-10-01", "2013-10-29", "2013-11-26"])
|
| 148 |
+
assert frequencies.infer_freq(index) != "WOM-4TUE"
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def test_fifth_week_of_month():
|
| 152 |
+
# see gh-9425
|
| 153 |
+
#
|
| 154 |
+
# Only supports freq up to WOM-4.
|
| 155 |
+
msg = (
|
| 156 |
+
"Of the four parameters: start, end, periods, "
|
| 157 |
+
"and freq, exactly three must be specified"
|
| 158 |
+
)
|
| 159 |
+
|
| 160 |
+
with pytest.raises(ValueError, match=msg):
|
| 161 |
+
date_range("2014-01-01", freq="WOM-5MON")
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def test_monthly_ambiguous():
|
| 165 |
+
rng = DatetimeIndex(["1/31/2000", "2/29/2000", "3/31/2000"])
|
| 166 |
+
assert rng.inferred_freq == "ME"
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
def test_annual_ambiguous():
|
| 170 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
| 171 |
+
assert rng.inferred_freq == "YE-JAN"
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
@pytest.mark.parametrize("count", range(1, 5))
|
| 175 |
+
def test_infer_freq_delta(base_delta_code_pair, count):
|
| 176 |
+
b = Timestamp(datetime.now())
|
| 177 |
+
base_delta, code = base_delta_code_pair
|
| 178 |
+
|
| 179 |
+
inc = base_delta * count
|
| 180 |
+
index = DatetimeIndex([b + inc * j for j in range(3)])
|
| 181 |
+
|
| 182 |
+
exp_freq = f"{count:d}{code}" if count > 1 else code
|
| 183 |
+
assert frequencies.infer_freq(index) == exp_freq
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@pytest.mark.parametrize(
|
| 187 |
+
"constructor",
|
| 188 |
+
[
|
| 189 |
+
lambda now, delta: DatetimeIndex(
|
| 190 |
+
[now + delta * 7] + [now + delta * j for j in range(3)]
|
| 191 |
+
),
|
| 192 |
+
lambda now, delta: DatetimeIndex(
|
| 193 |
+
[now + delta * j for j in range(3)] + [now + delta * 7]
|
| 194 |
+
),
|
| 195 |
+
],
|
| 196 |
+
)
|
| 197 |
+
def test_infer_freq_custom(base_delta_code_pair, constructor):
|
| 198 |
+
b = Timestamp(datetime.now())
|
| 199 |
+
base_delta, _ = base_delta_code_pair
|
| 200 |
+
|
| 201 |
+
index = constructor(b, base_delta)
|
| 202 |
+
assert frequencies.infer_freq(index) is None
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
@pytest.mark.parametrize(
|
| 206 |
+
"freq,expected", [("Q", "QE-DEC"), ("Q-NOV", "QE-NOV"), ("Q-OCT", "QE-OCT")]
|
| 207 |
+
)
|
| 208 |
+
def test_infer_freq_index(freq, expected):
|
| 209 |
+
rng = period_range("1959Q2", "2009Q3", freq=freq)
|
| 210 |
+
with tm.assert_produces_warning(FutureWarning, match="Dtype inference"):
|
| 211 |
+
rng = Index(rng.to_timestamp("D", how="e").astype(object))
|
| 212 |
+
|
| 213 |
+
assert rng.inferred_freq == expected
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
@pytest.mark.parametrize(
|
| 217 |
+
"expected,dates",
|
| 218 |
+
list(
|
| 219 |
+
{
|
| 220 |
+
"YS-JAN": ["2009-01-01", "2010-01-01", "2011-01-01", "2012-01-01"],
|
| 221 |
+
"QE-OCT": ["2009-01-31", "2009-04-30", "2009-07-31", "2009-10-31"],
|
| 222 |
+
"ME": ["2010-11-30", "2010-12-31", "2011-01-31", "2011-02-28"],
|
| 223 |
+
"W-SAT": ["2010-12-25", "2011-01-01", "2011-01-08", "2011-01-15"],
|
| 224 |
+
"D": ["2011-01-01", "2011-01-02", "2011-01-03", "2011-01-04"],
|
| 225 |
+
"h": [
|
| 226 |
+
"2011-12-31 22:00",
|
| 227 |
+
"2011-12-31 23:00",
|
| 228 |
+
"2012-01-01 00:00",
|
| 229 |
+
"2012-01-01 01:00",
|
| 230 |
+
],
|
| 231 |
+
}.items()
|
| 232 |
+
),
|
| 233 |
+
)
|
| 234 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
| 235 |
+
def test_infer_freq_tz(tz_naive_fixture, expected, dates, unit):
|
| 236 |
+
# see gh-7310, GH#55609
|
| 237 |
+
tz = tz_naive_fixture
|
| 238 |
+
idx = DatetimeIndex(dates, tz=tz).as_unit(unit)
|
| 239 |
+
assert idx.inferred_freq == expected
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def test_infer_freq_tz_series(tz_naive_fixture):
|
| 243 |
+
# infer_freq should work with both tz-naive and tz-aware series. See gh-52456
|
| 244 |
+
tz = tz_naive_fixture
|
| 245 |
+
idx = date_range("2021-01-01", "2021-01-04", tz=tz)
|
| 246 |
+
series = idx.to_series().reset_index(drop=True)
|
| 247 |
+
inferred_freq = frequencies.infer_freq(series)
|
| 248 |
+
assert inferred_freq == "D"
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
@pytest.mark.parametrize(
|
| 252 |
+
"date_pair",
|
| 253 |
+
[
|
| 254 |
+
["2013-11-02", "2013-11-5"], # Fall DST
|
| 255 |
+
["2014-03-08", "2014-03-11"], # Spring DST
|
| 256 |
+
["2014-01-01", "2014-01-03"], # Regular Time
|
| 257 |
+
],
|
| 258 |
+
)
|
| 259 |
+
@pytest.mark.parametrize(
|
| 260 |
+
"freq",
|
| 261 |
+
["h", "3h", "10min", "3601s", "3600001ms", "3600000001us", "3600000000001ns"],
|
| 262 |
+
)
|
| 263 |
+
def test_infer_freq_tz_transition(tz_naive_fixture, date_pair, freq):
|
| 264 |
+
# see gh-8772
|
| 265 |
+
tz = tz_naive_fixture
|
| 266 |
+
idx = date_range(date_pair[0], date_pair[1], freq=freq, tz=tz)
|
| 267 |
+
assert idx.inferred_freq == freq
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def test_infer_freq_tz_transition_custom():
|
| 271 |
+
index = date_range("2013-11-03", periods=5, freq="3h").tz_localize(
|
| 272 |
+
"America/Chicago"
|
| 273 |
+
)
|
| 274 |
+
assert index.inferred_freq is None
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
@pytest.mark.parametrize(
|
| 278 |
+
"data,expected",
|
| 279 |
+
[
|
| 280 |
+
# Hourly freq in a day must result in "h"
|
| 281 |
+
(
|
| 282 |
+
[
|
| 283 |
+
"2014-07-01 09:00",
|
| 284 |
+
"2014-07-01 10:00",
|
| 285 |
+
"2014-07-01 11:00",
|
| 286 |
+
"2014-07-01 12:00",
|
| 287 |
+
"2014-07-01 13:00",
|
| 288 |
+
"2014-07-01 14:00",
|
| 289 |
+
],
|
| 290 |
+
"h",
|
| 291 |
+
),
|
| 292 |
+
(
|
| 293 |
+
[
|
| 294 |
+
"2014-07-01 09:00",
|
| 295 |
+
"2014-07-01 10:00",
|
| 296 |
+
"2014-07-01 11:00",
|
| 297 |
+
"2014-07-01 12:00",
|
| 298 |
+
"2014-07-01 13:00",
|
| 299 |
+
"2014-07-01 14:00",
|
| 300 |
+
"2014-07-01 15:00",
|
| 301 |
+
"2014-07-01 16:00",
|
| 302 |
+
"2014-07-02 09:00",
|
| 303 |
+
"2014-07-02 10:00",
|
| 304 |
+
"2014-07-02 11:00",
|
| 305 |
+
],
|
| 306 |
+
"bh",
|
| 307 |
+
),
|
| 308 |
+
(
|
| 309 |
+
[
|
| 310 |
+
"2014-07-04 09:00",
|
| 311 |
+
"2014-07-04 10:00",
|
| 312 |
+
"2014-07-04 11:00",
|
| 313 |
+
"2014-07-04 12:00",
|
| 314 |
+
"2014-07-04 13:00",
|
| 315 |
+
"2014-07-04 14:00",
|
| 316 |
+
"2014-07-04 15:00",
|
| 317 |
+
"2014-07-04 16:00",
|
| 318 |
+
"2014-07-07 09:00",
|
| 319 |
+
"2014-07-07 10:00",
|
| 320 |
+
"2014-07-07 11:00",
|
| 321 |
+
],
|
| 322 |
+
"bh",
|
| 323 |
+
),
|
| 324 |
+
(
|
| 325 |
+
[
|
| 326 |
+
"2014-07-04 09:00",
|
| 327 |
+
"2014-07-04 10:00",
|
| 328 |
+
"2014-07-04 11:00",
|
| 329 |
+
"2014-07-04 12:00",
|
| 330 |
+
"2014-07-04 13:00",
|
| 331 |
+
"2014-07-04 14:00",
|
| 332 |
+
"2014-07-04 15:00",
|
| 333 |
+
"2014-07-04 16:00",
|
| 334 |
+
"2014-07-07 09:00",
|
| 335 |
+
"2014-07-07 10:00",
|
| 336 |
+
"2014-07-07 11:00",
|
| 337 |
+
"2014-07-07 12:00",
|
| 338 |
+
"2014-07-07 13:00",
|
| 339 |
+
"2014-07-07 14:00",
|
| 340 |
+
"2014-07-07 15:00",
|
| 341 |
+
"2014-07-07 16:00",
|
| 342 |
+
"2014-07-08 09:00",
|
| 343 |
+
"2014-07-08 10:00",
|
| 344 |
+
"2014-07-08 11:00",
|
| 345 |
+
"2014-07-08 12:00",
|
| 346 |
+
"2014-07-08 13:00",
|
| 347 |
+
"2014-07-08 14:00",
|
| 348 |
+
"2014-07-08 15:00",
|
| 349 |
+
"2014-07-08 16:00",
|
| 350 |
+
],
|
| 351 |
+
"bh",
|
| 352 |
+
),
|
| 353 |
+
],
|
| 354 |
+
)
|
| 355 |
+
def test_infer_freq_business_hour(data, expected):
|
| 356 |
+
# see gh-7905
|
| 357 |
+
idx = DatetimeIndex(data)
|
| 358 |
+
assert idx.inferred_freq == expected
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
def test_not_monotonic():
|
| 362 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
| 363 |
+
rng = rng[::-1]
|
| 364 |
+
|
| 365 |
+
assert rng.inferred_freq == "-1YE-JAN"
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def test_non_datetime_index2():
|
| 369 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
| 370 |
+
vals = rng.to_pydatetime()
|
| 371 |
+
|
| 372 |
+
result = frequencies.infer_freq(vals)
|
| 373 |
+
assert result == rng.inferred_freq
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
@pytest.mark.parametrize(
|
| 377 |
+
"idx",
|
| 378 |
+
[
|
| 379 |
+
Index(np.arange(5), dtype=np.int64),
|
| 380 |
+
Index(np.arange(5), dtype=np.float64),
|
| 381 |
+
period_range("2020-01-01", periods=5),
|
| 382 |
+
RangeIndex(5),
|
| 383 |
+
],
|
| 384 |
+
)
|
| 385 |
+
def test_invalid_index_types(idx):
|
| 386 |
+
# see gh-48439
|
| 387 |
+
msg = "|".join(
|
| 388 |
+
[
|
| 389 |
+
"cannot infer freq from a non-convertible",
|
| 390 |
+
"Check the `freq` attribute instead of using infer_freq",
|
| 391 |
+
]
|
| 392 |
+
)
|
| 393 |
+
|
| 394 |
+
with pytest.raises(TypeError, match=msg):
|
| 395 |
+
frequencies.infer_freq(idx)
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
@pytest.mark.skipif(is_platform_windows(), reason="see gh-10822: Windows issue")
|
| 399 |
+
def test_invalid_index_types_unicode():
|
| 400 |
+
# see gh-10822
|
| 401 |
+
#
|
| 402 |
+
# Odd error message on conversions to datetime for unicode.
|
| 403 |
+
msg = "Unknown datetime string format"
|
| 404 |
+
|
| 405 |
+
with pytest.raises(ValueError, match=msg):
|
| 406 |
+
frequencies.infer_freq(Index(["ZqgszYBfuL"]))
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
def test_string_datetime_like_compat():
|
| 410 |
+
# see gh-6463
|
| 411 |
+
data = ["2004-01", "2004-02", "2004-03", "2004-04"]
|
| 412 |
+
|
| 413 |
+
expected = frequencies.infer_freq(data)
|
| 414 |
+
result = frequencies.infer_freq(Index(data))
|
| 415 |
+
|
| 416 |
+
assert result == expected
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
def test_series():
|
| 420 |
+
# see gh-6407
|
| 421 |
+
s = Series(date_range("20130101", "20130110"))
|
| 422 |
+
inferred = frequencies.infer_freq(s)
|
| 423 |
+
assert inferred == "D"
|
| 424 |
+
|
| 425 |
+
|
| 426 |
+
@pytest.mark.parametrize("end", [10, 10.0])
|
| 427 |
+
def test_series_invalid_type(end):
|
| 428 |
+
# see gh-6407
|
| 429 |
+
msg = "cannot infer freq from a non-convertible dtype on a Series"
|
| 430 |
+
s = Series(np.arange(end))
|
| 431 |
+
|
| 432 |
+
with pytest.raises(TypeError, match=msg):
|
| 433 |
+
frequencies.infer_freq(s)
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
def test_series_inconvertible_string(using_infer_string):
|
| 437 |
+
# see gh-6407
|
| 438 |
+
if using_infer_string:
|
| 439 |
+
msg = "cannot infer freq from"
|
| 440 |
+
|
| 441 |
+
with pytest.raises(TypeError, match=msg):
|
| 442 |
+
frequencies.infer_freq(Series(["foo", "bar"]))
|
| 443 |
+
else:
|
| 444 |
+
msg = "Unknown datetime string format"
|
| 445 |
+
|
| 446 |
+
with pytest.raises(ValueError, match=msg):
|
| 447 |
+
frequencies.infer_freq(Series(["foo", "bar"]))
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
@pytest.mark.parametrize("freq", [None, "ms"])
|
| 451 |
+
def test_series_period_index(freq):
|
| 452 |
+
# see gh-6407
|
| 453 |
+
#
|
| 454 |
+
# Cannot infer on PeriodIndex
|
| 455 |
+
msg = "cannot infer freq from a non-convertible dtype on a Series"
|
| 456 |
+
s = Series(period_range("2013", periods=10, freq=freq))
|
| 457 |
+
|
| 458 |
+
with pytest.raises(TypeError, match=msg):
|
| 459 |
+
frequencies.infer_freq(s)
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
@pytest.mark.parametrize("freq", ["ME", "ms", "s"])
|
| 463 |
+
def test_series_datetime_index(freq):
|
| 464 |
+
s = Series(date_range("20130101", periods=10, freq=freq))
|
| 465 |
+
inferred = frequencies.infer_freq(s)
|
| 466 |
+
assert inferred == freq
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
@pytest.mark.parametrize(
|
| 470 |
+
"offset_func",
|
| 471 |
+
[
|
| 472 |
+
_get_offset,
|
| 473 |
+
lambda freq: date_range("2011-01-01", periods=5, freq=freq),
|
| 474 |
+
],
|
| 475 |
+
)
|
| 476 |
+
@pytest.mark.parametrize(
|
| 477 |
+
"freq",
|
| 478 |
+
[
|
| 479 |
+
"WEEKDAY",
|
| 480 |
+
"EOM",
|
| 481 |
+
"W@MON",
|
| 482 |
+
"W@TUE",
|
| 483 |
+
"W@WED",
|
| 484 |
+
"W@THU",
|
| 485 |
+
"W@FRI",
|
| 486 |
+
"W@SAT",
|
| 487 |
+
"W@SUN",
|
| 488 |
+
"QE@JAN",
|
| 489 |
+
"QE@FEB",
|
| 490 |
+
"QE@MAR",
|
| 491 |
+
"YE@JAN",
|
| 492 |
+
"YE@FEB",
|
| 493 |
+
"YE@MAR",
|
| 494 |
+
"YE@APR",
|
| 495 |
+
"YE@MAY",
|
| 496 |
+
"YE@JUN",
|
| 497 |
+
"YE@JUL",
|
| 498 |
+
"YE@AUG",
|
| 499 |
+
"YE@SEP",
|
| 500 |
+
"YE@OCT",
|
| 501 |
+
"YE@NOV",
|
| 502 |
+
"YE@DEC",
|
| 503 |
+
"YE@JAN",
|
| 504 |
+
"WOM@1MON",
|
| 505 |
+
"WOM@2MON",
|
| 506 |
+
"WOM@3MON",
|
| 507 |
+
"WOM@4MON",
|
| 508 |
+
"WOM@1TUE",
|
| 509 |
+
"WOM@2TUE",
|
| 510 |
+
"WOM@3TUE",
|
| 511 |
+
"WOM@4TUE",
|
| 512 |
+
"WOM@1WED",
|
| 513 |
+
"WOM@2WED",
|
| 514 |
+
"WOM@3WED",
|
| 515 |
+
"WOM@4WED",
|
| 516 |
+
"WOM@1THU",
|
| 517 |
+
"WOM@2THU",
|
| 518 |
+
"WOM@3THU",
|
| 519 |
+
"WOM@4THU",
|
| 520 |
+
"WOM@1FRI",
|
| 521 |
+
"WOM@2FRI",
|
| 522 |
+
"WOM@3FRI",
|
| 523 |
+
"WOM@4FRI",
|
| 524 |
+
],
|
| 525 |
+
)
|
| 526 |
+
def test_legacy_offset_warnings(offset_func, freq):
|
| 527 |
+
with pytest.raises(ValueError, match=INVALID_FREQ_ERR_MSG):
|
| 528 |
+
offset_func(freq)
|
| 529 |
+
|
| 530 |
+
|
| 531 |
+
def test_ms_vs_capital_ms():
|
| 532 |
+
left = _get_offset("ms")
|
| 533 |
+
right = _get_offset("MS")
|
| 534 |
+
|
| 535 |
+
assert left == offsets.Milli()
|
| 536 |
+
assert right == offsets.MonthBegin()
|
| 537 |
+
|
| 538 |
+
|
| 539 |
+
def test_infer_freq_non_nano():
|
| 540 |
+
arr = np.arange(10).astype(np.int64).view("M8[s]")
|
| 541 |
+
dta = DatetimeArray._simple_new(arr, dtype=arr.dtype)
|
| 542 |
+
res = frequencies.infer_freq(dta)
|
| 543 |
+
assert res == "s"
|
| 544 |
+
|
| 545 |
+
arr2 = arr.view("m8[ms]")
|
| 546 |
+
tda = TimedeltaArray._simple_new(arr2, dtype=arr2.dtype)
|
| 547 |
+
res2 = frequencies.infer_freq(tda)
|
| 548 |
+
assert res2 == "ms"
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
def test_infer_freq_non_nano_tzaware(tz_aware_fixture):
|
| 552 |
+
tz = tz_aware_fixture
|
| 553 |
+
|
| 554 |
+
dti = date_range("2016-01-01", periods=365, freq="B", tz=tz)
|
| 555 |
+
dta = dti._data.as_unit("s")
|
| 556 |
+
|
| 557 |
+
res = frequencies.infer_freq(dta)
|
| 558 |
+
assert res == "B"
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__init__.py
ADDED
|
File without changes
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (184 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_calendar.cpython-310.pyc
ADDED
|
Binary file (3.75 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_federal.cpython-310.pyc
ADDED
|
Binary file (1.96 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_holiday.cpython-310.pyc
ADDED
|
Binary file (7.78 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_observance.cpython-310.pyc
ADDED
|
Binary file (2.47 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_calendar.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
from pandas import (
|
| 6 |
+
DatetimeIndex,
|
| 7 |
+
offsets,
|
| 8 |
+
to_datetime,
|
| 9 |
+
)
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
|
| 12 |
+
from pandas.tseries.holiday import (
|
| 13 |
+
AbstractHolidayCalendar,
|
| 14 |
+
Holiday,
|
| 15 |
+
Timestamp,
|
| 16 |
+
USFederalHolidayCalendar,
|
| 17 |
+
USLaborDay,
|
| 18 |
+
USThanksgivingDay,
|
| 19 |
+
get_calendar,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@pytest.mark.parametrize(
|
| 24 |
+
"transform", [lambda x: x, lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
|
| 25 |
+
)
|
| 26 |
+
def test_calendar(transform):
|
| 27 |
+
start_date = datetime(2012, 1, 1)
|
| 28 |
+
end_date = datetime(2012, 12, 31)
|
| 29 |
+
|
| 30 |
+
calendar = USFederalHolidayCalendar()
|
| 31 |
+
holidays = calendar.holidays(transform(start_date), transform(end_date))
|
| 32 |
+
|
| 33 |
+
expected = [
|
| 34 |
+
datetime(2012, 1, 2),
|
| 35 |
+
datetime(2012, 1, 16),
|
| 36 |
+
datetime(2012, 2, 20),
|
| 37 |
+
datetime(2012, 5, 28),
|
| 38 |
+
datetime(2012, 7, 4),
|
| 39 |
+
datetime(2012, 9, 3),
|
| 40 |
+
datetime(2012, 10, 8),
|
| 41 |
+
datetime(2012, 11, 12),
|
| 42 |
+
datetime(2012, 11, 22),
|
| 43 |
+
datetime(2012, 12, 25),
|
| 44 |
+
]
|
| 45 |
+
|
| 46 |
+
assert list(holidays.to_pydatetime()) == expected
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def test_calendar_caching():
|
| 50 |
+
# see gh-9552.
|
| 51 |
+
|
| 52 |
+
class TestCalendar(AbstractHolidayCalendar):
|
| 53 |
+
def __init__(self, name=None, rules=None) -> None:
|
| 54 |
+
super().__init__(name=name, rules=rules)
|
| 55 |
+
|
| 56 |
+
jan1 = TestCalendar(rules=[Holiday("jan1", year=2015, month=1, day=1)])
|
| 57 |
+
jan2 = TestCalendar(rules=[Holiday("jan2", year=2015, month=1, day=2)])
|
| 58 |
+
|
| 59 |
+
# Getting holidays for Jan 1 should not alter results for Jan 2.
|
| 60 |
+
expected = DatetimeIndex(["01-Jan-2015"]).as_unit("ns")
|
| 61 |
+
tm.assert_index_equal(jan1.holidays(), expected)
|
| 62 |
+
|
| 63 |
+
expected2 = DatetimeIndex(["02-Jan-2015"]).as_unit("ns")
|
| 64 |
+
tm.assert_index_equal(jan2.holidays(), expected2)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def test_calendar_observance_dates():
|
| 68 |
+
# see gh-11477
|
| 69 |
+
us_fed_cal = get_calendar("USFederalHolidayCalendar")
|
| 70 |
+
holidays0 = us_fed_cal.holidays(
|
| 71 |
+
datetime(2015, 7, 3), datetime(2015, 7, 3)
|
| 72 |
+
) # <-- same start and end dates
|
| 73 |
+
holidays1 = us_fed_cal.holidays(
|
| 74 |
+
datetime(2015, 7, 3), datetime(2015, 7, 6)
|
| 75 |
+
) # <-- different start and end dates
|
| 76 |
+
holidays2 = us_fed_cal.holidays(
|
| 77 |
+
datetime(2015, 7, 3), datetime(2015, 7, 3)
|
| 78 |
+
) # <-- same start and end dates
|
| 79 |
+
|
| 80 |
+
# These should all produce the same result.
|
| 81 |
+
#
|
| 82 |
+
# In addition, calling with different start and end
|
| 83 |
+
# dates should not alter the output if we call the
|
| 84 |
+
# function again with the same start and end date.
|
| 85 |
+
tm.assert_index_equal(holidays0, holidays1)
|
| 86 |
+
tm.assert_index_equal(holidays0, holidays2)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def test_rule_from_name():
|
| 90 |
+
us_fed_cal = get_calendar("USFederalHolidayCalendar")
|
| 91 |
+
assert us_fed_cal.rule_from_name("Thanksgiving Day") == USThanksgivingDay
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def test_calendar_2031():
|
| 95 |
+
# See gh-27790
|
| 96 |
+
#
|
| 97 |
+
# Labor Day 2031 is on September 1. Saturday before is August 30.
|
| 98 |
+
# Next working day after August 30 ought to be Tuesday, September 2.
|
| 99 |
+
|
| 100 |
+
class testCalendar(AbstractHolidayCalendar):
|
| 101 |
+
rules = [USLaborDay]
|
| 102 |
+
|
| 103 |
+
cal = testCalendar()
|
| 104 |
+
workDay = offsets.CustomBusinessDay(calendar=cal)
|
| 105 |
+
Sat_before_Labor_Day_2031 = to_datetime("2031-08-30")
|
| 106 |
+
next_working_day = Sat_before_Labor_Day_2031 + 0 * workDay
|
| 107 |
+
assert next_working_day == to_datetime("2031-09-02")
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def test_no_holidays_calendar():
|
| 111 |
+
# Test for issue #31415
|
| 112 |
+
|
| 113 |
+
class NoHolidaysCalendar(AbstractHolidayCalendar):
|
| 114 |
+
pass
|
| 115 |
+
|
| 116 |
+
cal = NoHolidaysCalendar()
|
| 117 |
+
holidays = cal.holidays(Timestamp("01-Jan-2020"), Timestamp("01-Jan-2021"))
|
| 118 |
+
empty_index = DatetimeIndex([]) # Type is DatetimeIndex since return_name=False
|
| 119 |
+
tm.assert_index_equal(holidays, empty_index)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_federal.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
|
| 3 |
+
from pandas import DatetimeIndex
|
| 4 |
+
import pandas._testing as tm
|
| 5 |
+
|
| 6 |
+
from pandas.tseries.holiday import (
|
| 7 |
+
AbstractHolidayCalendar,
|
| 8 |
+
USFederalHolidayCalendar,
|
| 9 |
+
USMartinLutherKingJr,
|
| 10 |
+
USMemorialDay,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def test_no_mlk_before_1986():
|
| 15 |
+
# see gh-10278
|
| 16 |
+
class MLKCalendar(AbstractHolidayCalendar):
|
| 17 |
+
rules = [USMartinLutherKingJr]
|
| 18 |
+
|
| 19 |
+
holidays = MLKCalendar().holidays(start="1984", end="1988").to_pydatetime().tolist()
|
| 20 |
+
|
| 21 |
+
# Testing to make sure holiday is not incorrectly observed before 1986.
|
| 22 |
+
assert holidays == [datetime(1986, 1, 20, 0, 0), datetime(1987, 1, 19, 0, 0)]
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def test_memorial_day():
|
| 26 |
+
class MemorialDay(AbstractHolidayCalendar):
|
| 27 |
+
rules = [USMemorialDay]
|
| 28 |
+
|
| 29 |
+
holidays = MemorialDay().holidays(start="1971", end="1980").to_pydatetime().tolist()
|
| 30 |
+
|
| 31 |
+
# Fixes 5/31 error and checked manually against Wikipedia.
|
| 32 |
+
assert holidays == [
|
| 33 |
+
datetime(1971, 5, 31, 0, 0),
|
| 34 |
+
datetime(1972, 5, 29, 0, 0),
|
| 35 |
+
datetime(1973, 5, 28, 0, 0),
|
| 36 |
+
datetime(1974, 5, 27, 0, 0),
|
| 37 |
+
datetime(1975, 5, 26, 0, 0),
|
| 38 |
+
datetime(1976, 5, 31, 0, 0),
|
| 39 |
+
datetime(1977, 5, 30, 0, 0),
|
| 40 |
+
datetime(1978, 5, 29, 0, 0),
|
| 41 |
+
datetime(1979, 5, 28, 0, 0),
|
| 42 |
+
]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def test_federal_holiday_inconsistent_returntype():
|
| 46 |
+
# GH 49075 test case
|
| 47 |
+
# Instantiate two calendars to rule out _cache
|
| 48 |
+
cal1 = USFederalHolidayCalendar()
|
| 49 |
+
cal2 = USFederalHolidayCalendar()
|
| 50 |
+
|
| 51 |
+
results_2018 = cal1.holidays(start=datetime(2018, 8, 1), end=datetime(2018, 8, 31))
|
| 52 |
+
results_2019 = cal2.holidays(start=datetime(2019, 8, 1), end=datetime(2019, 8, 31))
|
| 53 |
+
expected_results = DatetimeIndex([], dtype="datetime64[ns]", freq=None)
|
| 54 |
+
|
| 55 |
+
# Check against expected results to ensure both date
|
| 56 |
+
# ranges generate expected results as per GH49075 submission
|
| 57 |
+
tm.assert_index_equal(results_2018, expected_results)
|
| 58 |
+
tm.assert_index_equal(results_2019, expected_results)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_holiday.py
ADDED
|
@@ -0,0 +1,332 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
from pytz import utc
|
| 5 |
+
|
| 6 |
+
from pandas import (
|
| 7 |
+
DatetimeIndex,
|
| 8 |
+
Series,
|
| 9 |
+
)
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
|
| 12 |
+
from pandas.tseries.holiday import (
|
| 13 |
+
MO,
|
| 14 |
+
SA,
|
| 15 |
+
AbstractHolidayCalendar,
|
| 16 |
+
DateOffset,
|
| 17 |
+
EasterMonday,
|
| 18 |
+
GoodFriday,
|
| 19 |
+
Holiday,
|
| 20 |
+
HolidayCalendarFactory,
|
| 21 |
+
Timestamp,
|
| 22 |
+
USColumbusDay,
|
| 23 |
+
USFederalHolidayCalendar,
|
| 24 |
+
USLaborDay,
|
| 25 |
+
USMartinLutherKingJr,
|
| 26 |
+
USMemorialDay,
|
| 27 |
+
USPresidentsDay,
|
| 28 |
+
USThanksgivingDay,
|
| 29 |
+
get_calendar,
|
| 30 |
+
next_monday,
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@pytest.mark.parametrize(
|
| 35 |
+
"holiday,start_date,end_date,expected",
|
| 36 |
+
[
|
| 37 |
+
(
|
| 38 |
+
USMemorialDay,
|
| 39 |
+
datetime(2011, 1, 1),
|
| 40 |
+
datetime(2020, 12, 31),
|
| 41 |
+
[
|
| 42 |
+
datetime(2011, 5, 30),
|
| 43 |
+
datetime(2012, 5, 28),
|
| 44 |
+
datetime(2013, 5, 27),
|
| 45 |
+
datetime(2014, 5, 26),
|
| 46 |
+
datetime(2015, 5, 25),
|
| 47 |
+
datetime(2016, 5, 30),
|
| 48 |
+
datetime(2017, 5, 29),
|
| 49 |
+
datetime(2018, 5, 28),
|
| 50 |
+
datetime(2019, 5, 27),
|
| 51 |
+
datetime(2020, 5, 25),
|
| 52 |
+
],
|
| 53 |
+
),
|
| 54 |
+
(
|
| 55 |
+
Holiday("July 4th Eve", month=7, day=3),
|
| 56 |
+
"2001-01-01",
|
| 57 |
+
"2003-03-03",
|
| 58 |
+
[Timestamp("2001-07-03 00:00:00"), Timestamp("2002-07-03 00:00:00")],
|
| 59 |
+
),
|
| 60 |
+
(
|
| 61 |
+
Holiday("July 4th Eve", month=7, day=3, days_of_week=(0, 1, 2, 3)),
|
| 62 |
+
"2001-01-01",
|
| 63 |
+
"2008-03-03",
|
| 64 |
+
[
|
| 65 |
+
Timestamp("2001-07-03 00:00:00"),
|
| 66 |
+
Timestamp("2002-07-03 00:00:00"),
|
| 67 |
+
Timestamp("2003-07-03 00:00:00"),
|
| 68 |
+
Timestamp("2006-07-03 00:00:00"),
|
| 69 |
+
Timestamp("2007-07-03 00:00:00"),
|
| 70 |
+
],
|
| 71 |
+
),
|
| 72 |
+
(
|
| 73 |
+
EasterMonday,
|
| 74 |
+
datetime(2011, 1, 1),
|
| 75 |
+
datetime(2020, 12, 31),
|
| 76 |
+
[
|
| 77 |
+
Timestamp("2011-04-25 00:00:00"),
|
| 78 |
+
Timestamp("2012-04-09 00:00:00"),
|
| 79 |
+
Timestamp("2013-04-01 00:00:00"),
|
| 80 |
+
Timestamp("2014-04-21 00:00:00"),
|
| 81 |
+
Timestamp("2015-04-06 00:00:00"),
|
| 82 |
+
Timestamp("2016-03-28 00:00:00"),
|
| 83 |
+
Timestamp("2017-04-17 00:00:00"),
|
| 84 |
+
Timestamp("2018-04-02 00:00:00"),
|
| 85 |
+
Timestamp("2019-04-22 00:00:00"),
|
| 86 |
+
Timestamp("2020-04-13 00:00:00"),
|
| 87 |
+
],
|
| 88 |
+
),
|
| 89 |
+
(
|
| 90 |
+
GoodFriday,
|
| 91 |
+
datetime(2011, 1, 1),
|
| 92 |
+
datetime(2020, 12, 31),
|
| 93 |
+
[
|
| 94 |
+
Timestamp("2011-04-22 00:00:00"),
|
| 95 |
+
Timestamp("2012-04-06 00:00:00"),
|
| 96 |
+
Timestamp("2013-03-29 00:00:00"),
|
| 97 |
+
Timestamp("2014-04-18 00:00:00"),
|
| 98 |
+
Timestamp("2015-04-03 00:00:00"),
|
| 99 |
+
Timestamp("2016-03-25 00:00:00"),
|
| 100 |
+
Timestamp("2017-04-14 00:00:00"),
|
| 101 |
+
Timestamp("2018-03-30 00:00:00"),
|
| 102 |
+
Timestamp("2019-04-19 00:00:00"),
|
| 103 |
+
Timestamp("2020-04-10 00:00:00"),
|
| 104 |
+
],
|
| 105 |
+
),
|
| 106 |
+
(
|
| 107 |
+
USThanksgivingDay,
|
| 108 |
+
datetime(2011, 1, 1),
|
| 109 |
+
datetime(2020, 12, 31),
|
| 110 |
+
[
|
| 111 |
+
datetime(2011, 11, 24),
|
| 112 |
+
datetime(2012, 11, 22),
|
| 113 |
+
datetime(2013, 11, 28),
|
| 114 |
+
datetime(2014, 11, 27),
|
| 115 |
+
datetime(2015, 11, 26),
|
| 116 |
+
datetime(2016, 11, 24),
|
| 117 |
+
datetime(2017, 11, 23),
|
| 118 |
+
datetime(2018, 11, 22),
|
| 119 |
+
datetime(2019, 11, 28),
|
| 120 |
+
datetime(2020, 11, 26),
|
| 121 |
+
],
|
| 122 |
+
),
|
| 123 |
+
],
|
| 124 |
+
)
|
| 125 |
+
def test_holiday_dates(holiday, start_date, end_date, expected):
|
| 126 |
+
assert list(holiday.dates(start_date, end_date)) == expected
|
| 127 |
+
|
| 128 |
+
# Verify that timezone info is preserved.
|
| 129 |
+
assert list(
|
| 130 |
+
holiday.dates(
|
| 131 |
+
utc.localize(Timestamp(start_date)), utc.localize(Timestamp(end_date))
|
| 132 |
+
)
|
| 133 |
+
) == [utc.localize(dt) for dt in expected]
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
@pytest.mark.parametrize(
|
| 137 |
+
"holiday,start,expected",
|
| 138 |
+
[
|
| 139 |
+
(USMemorialDay, datetime(2015, 7, 1), []),
|
| 140 |
+
(USMemorialDay, "2015-05-25", [Timestamp("2015-05-25")]),
|
| 141 |
+
(USLaborDay, datetime(2015, 7, 1), []),
|
| 142 |
+
(USLaborDay, "2015-09-07", [Timestamp("2015-09-07")]),
|
| 143 |
+
(USColumbusDay, datetime(2015, 7, 1), []),
|
| 144 |
+
(USColumbusDay, "2015-10-12", [Timestamp("2015-10-12")]),
|
| 145 |
+
(USThanksgivingDay, datetime(2015, 7, 1), []),
|
| 146 |
+
(USThanksgivingDay, "2015-11-26", [Timestamp("2015-11-26")]),
|
| 147 |
+
(USMartinLutherKingJr, datetime(2015, 7, 1), []),
|
| 148 |
+
(USMartinLutherKingJr, "2015-01-19", [Timestamp("2015-01-19")]),
|
| 149 |
+
(USPresidentsDay, datetime(2015, 7, 1), []),
|
| 150 |
+
(USPresidentsDay, "2015-02-16", [Timestamp("2015-02-16")]),
|
| 151 |
+
(GoodFriday, datetime(2015, 7, 1), []),
|
| 152 |
+
(GoodFriday, "2015-04-03", [Timestamp("2015-04-03")]),
|
| 153 |
+
(EasterMonday, "2015-04-06", [Timestamp("2015-04-06")]),
|
| 154 |
+
(EasterMonday, datetime(2015, 7, 1), []),
|
| 155 |
+
(EasterMonday, "2015-04-05", []),
|
| 156 |
+
("New Year's Day", "2015-01-01", [Timestamp("2015-01-01")]),
|
| 157 |
+
("New Year's Day", "2010-12-31", [Timestamp("2010-12-31")]),
|
| 158 |
+
("New Year's Day", datetime(2015, 7, 1), []),
|
| 159 |
+
("New Year's Day", "2011-01-01", []),
|
| 160 |
+
("Independence Day", "2015-07-03", [Timestamp("2015-07-03")]),
|
| 161 |
+
("Independence Day", datetime(2015, 7, 1), []),
|
| 162 |
+
("Independence Day", "2015-07-04", []),
|
| 163 |
+
("Veterans Day", "2012-11-12", [Timestamp("2012-11-12")]),
|
| 164 |
+
("Veterans Day", datetime(2015, 7, 1), []),
|
| 165 |
+
("Veterans Day", "2012-11-11", []),
|
| 166 |
+
("Christmas Day", "2011-12-26", [Timestamp("2011-12-26")]),
|
| 167 |
+
("Christmas Day", datetime(2015, 7, 1), []),
|
| 168 |
+
("Christmas Day", "2011-12-25", []),
|
| 169 |
+
("Juneteenth National Independence Day", "2020-06-19", []),
|
| 170 |
+
(
|
| 171 |
+
"Juneteenth National Independence Day",
|
| 172 |
+
"2021-06-18",
|
| 173 |
+
[Timestamp("2021-06-18")],
|
| 174 |
+
),
|
| 175 |
+
("Juneteenth National Independence Day", "2022-06-19", []),
|
| 176 |
+
(
|
| 177 |
+
"Juneteenth National Independence Day",
|
| 178 |
+
"2022-06-20",
|
| 179 |
+
[Timestamp("2022-06-20")],
|
| 180 |
+
),
|
| 181 |
+
],
|
| 182 |
+
)
|
| 183 |
+
def test_holidays_within_dates(holiday, start, expected):
|
| 184 |
+
# see gh-11477
|
| 185 |
+
#
|
| 186 |
+
# Fix holiday behavior where holiday.dates returned dates outside
|
| 187 |
+
# start/end date, or observed rules could not be applied because the
|
| 188 |
+
# holiday was not in the original date range (e.g., 7/4/2015 -> 7/3/2015).
|
| 189 |
+
if isinstance(holiday, str):
|
| 190 |
+
calendar = get_calendar("USFederalHolidayCalendar")
|
| 191 |
+
holiday = calendar.rule_from_name(holiday)
|
| 192 |
+
|
| 193 |
+
assert list(holiday.dates(start, start)) == expected
|
| 194 |
+
|
| 195 |
+
# Verify that timezone info is preserved.
|
| 196 |
+
assert list(
|
| 197 |
+
holiday.dates(utc.localize(Timestamp(start)), utc.localize(Timestamp(start)))
|
| 198 |
+
) == [utc.localize(dt) for dt in expected]
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
@pytest.mark.parametrize(
|
| 202 |
+
"transform", [lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
|
| 203 |
+
)
|
| 204 |
+
def test_argument_types(transform):
|
| 205 |
+
start_date = datetime(2011, 1, 1)
|
| 206 |
+
end_date = datetime(2020, 12, 31)
|
| 207 |
+
|
| 208 |
+
holidays = USThanksgivingDay.dates(start_date, end_date)
|
| 209 |
+
holidays2 = USThanksgivingDay.dates(transform(start_date), transform(end_date))
|
| 210 |
+
tm.assert_index_equal(holidays, holidays2)
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
@pytest.mark.parametrize(
|
| 214 |
+
"name,kwargs",
|
| 215 |
+
[
|
| 216 |
+
("One-Time", {"year": 2012, "month": 5, "day": 28}),
|
| 217 |
+
(
|
| 218 |
+
"Range",
|
| 219 |
+
{
|
| 220 |
+
"month": 5,
|
| 221 |
+
"day": 28,
|
| 222 |
+
"start_date": datetime(2012, 1, 1),
|
| 223 |
+
"end_date": datetime(2012, 12, 31),
|
| 224 |
+
"offset": DateOffset(weekday=MO(1)),
|
| 225 |
+
},
|
| 226 |
+
),
|
| 227 |
+
],
|
| 228 |
+
)
|
| 229 |
+
def test_special_holidays(name, kwargs):
|
| 230 |
+
base_date = [datetime(2012, 5, 28)]
|
| 231 |
+
holiday = Holiday(name, **kwargs)
|
| 232 |
+
|
| 233 |
+
start_date = datetime(2011, 1, 1)
|
| 234 |
+
end_date = datetime(2020, 12, 31)
|
| 235 |
+
|
| 236 |
+
assert base_date == holiday.dates(start_date, end_date)
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
def test_get_calendar():
|
| 240 |
+
class TestCalendar(AbstractHolidayCalendar):
|
| 241 |
+
rules = []
|
| 242 |
+
|
| 243 |
+
calendar = get_calendar("TestCalendar")
|
| 244 |
+
assert TestCalendar == type(calendar)
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def test_factory():
|
| 248 |
+
class_1 = HolidayCalendarFactory(
|
| 249 |
+
"MemorialDay", AbstractHolidayCalendar, USMemorialDay
|
| 250 |
+
)
|
| 251 |
+
class_2 = HolidayCalendarFactory(
|
| 252 |
+
"Thanksgiving", AbstractHolidayCalendar, USThanksgivingDay
|
| 253 |
+
)
|
| 254 |
+
class_3 = HolidayCalendarFactory("Combined", class_1, class_2)
|
| 255 |
+
|
| 256 |
+
assert len(class_1.rules) == 1
|
| 257 |
+
assert len(class_2.rules) == 1
|
| 258 |
+
assert len(class_3.rules) == 2
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def test_both_offset_observance_raises():
|
| 262 |
+
# see gh-10217
|
| 263 |
+
msg = "Cannot use both offset and observance"
|
| 264 |
+
with pytest.raises(NotImplementedError, match=msg):
|
| 265 |
+
Holiday(
|
| 266 |
+
"Cyber Monday",
|
| 267 |
+
month=11,
|
| 268 |
+
day=1,
|
| 269 |
+
offset=[DateOffset(weekday=SA(4))],
|
| 270 |
+
observance=next_monday,
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def test_half_open_interval_with_observance():
|
| 275 |
+
# Prompted by GH 49075
|
| 276 |
+
# Check for holidays that have a half-open date interval where
|
| 277 |
+
# they have either a start_date or end_date defined along
|
| 278 |
+
# with a defined observance pattern to make sure that the return type
|
| 279 |
+
# for Holiday.dates() remains consistent before & after the year that
|
| 280 |
+
# marks the 'edge' of the half-open date interval.
|
| 281 |
+
|
| 282 |
+
holiday_1 = Holiday(
|
| 283 |
+
"Arbitrary Holiday - start 2022-03-14",
|
| 284 |
+
start_date=datetime(2022, 3, 14),
|
| 285 |
+
month=3,
|
| 286 |
+
day=14,
|
| 287 |
+
observance=next_monday,
|
| 288 |
+
)
|
| 289 |
+
holiday_2 = Holiday(
|
| 290 |
+
"Arbitrary Holiday 2 - end 2022-03-20",
|
| 291 |
+
end_date=datetime(2022, 3, 20),
|
| 292 |
+
month=3,
|
| 293 |
+
day=20,
|
| 294 |
+
observance=next_monday,
|
| 295 |
+
)
|
| 296 |
+
|
| 297 |
+
class TestHolidayCalendar(AbstractHolidayCalendar):
|
| 298 |
+
rules = [
|
| 299 |
+
USMartinLutherKingJr,
|
| 300 |
+
holiday_1,
|
| 301 |
+
holiday_2,
|
| 302 |
+
USLaborDay,
|
| 303 |
+
]
|
| 304 |
+
|
| 305 |
+
start = Timestamp("2022-08-01")
|
| 306 |
+
end = Timestamp("2022-08-31")
|
| 307 |
+
year_offset = DateOffset(years=5)
|
| 308 |
+
expected_results = DatetimeIndex([], dtype="datetime64[ns]", freq=None)
|
| 309 |
+
test_cal = TestHolidayCalendar()
|
| 310 |
+
|
| 311 |
+
date_interval_low = test_cal.holidays(start - year_offset, end - year_offset)
|
| 312 |
+
date_window_edge = test_cal.holidays(start, end)
|
| 313 |
+
date_interval_high = test_cal.holidays(start + year_offset, end + year_offset)
|
| 314 |
+
|
| 315 |
+
tm.assert_index_equal(date_interval_low, expected_results)
|
| 316 |
+
tm.assert_index_equal(date_window_edge, expected_results)
|
| 317 |
+
tm.assert_index_equal(date_interval_high, expected_results)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def test_holidays_with_timezone_specified_but_no_occurences():
|
| 321 |
+
# GH 54580
|
| 322 |
+
# _apply_rule() in holiday.py was silently dropping timezones if you passed it
|
| 323 |
+
# an empty list of holiday dates that had timezone information
|
| 324 |
+
start_date = Timestamp("2018-01-01", tz="America/Chicago")
|
| 325 |
+
end_date = Timestamp("2018-01-11", tz="America/Chicago")
|
| 326 |
+
test_case = USFederalHolidayCalendar().holidays(
|
| 327 |
+
start_date, end_date, return_name=True
|
| 328 |
+
)
|
| 329 |
+
expected_results = Series("New Year's Day", index=[start_date])
|
| 330 |
+
expected_results.index = expected_results.index.as_unit("ns")
|
| 331 |
+
|
| 332 |
+
tm.assert_equal(test_case, expected_results)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__init__.py
ADDED
|
File without changes
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (184 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_hour.cpython-310.pyc
ADDED
|
Binary file (27.8 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_month.cpython-310.pyc
ADDED
|
Binary file (4.49 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_quarter.cpython-310.pyc
ADDED
|
Binary file (7.36 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_year.cpython-310.pyc
ADDED
|
Binary file (4.62 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_common.cpython-310.pyc
ADDED
|
Binary file (4.95 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_custom_business_day.cpython-310.pyc
ADDED
|
Binary file (4.05 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_custom_business_month.cpython-310.pyc
ADDED
|
Binary file (11.2 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_offsets.cpython-310.pyc
ADDED
|
Binary file (30.5 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_week.cpython-310.pyc
ADDED
|
Binary file (9.76 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/common.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Assertion helpers and base class for offsets tests
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def assert_offset_equal(offset, base, expected):
|
| 8 |
+
actual = offset + base
|
| 9 |
+
actual_swapped = base + offset
|
| 10 |
+
actual_apply = offset._apply(base)
|
| 11 |
+
try:
|
| 12 |
+
assert actual == expected
|
| 13 |
+
assert actual_swapped == expected
|
| 14 |
+
assert actual_apply == expected
|
| 15 |
+
except AssertionError as err:
|
| 16 |
+
raise AssertionError(
|
| 17 |
+
f"\nExpected: {expected}\nActual: {actual}\nFor Offset: {offset})"
|
| 18 |
+
f"\nAt Date: {base}"
|
| 19 |
+
) from err
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def assert_is_on_offset(offset, date, expected):
|
| 23 |
+
actual = offset.is_on_offset(date)
|
| 24 |
+
assert actual == expected, (
|
| 25 |
+
f"\nExpected: {expected}\nActual: {actual}\nFor Offset: {offset})"
|
| 26 |
+
f"\nAt Date: {date}"
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class WeekDay:
|
| 31 |
+
MON = 0
|
| 32 |
+
TUE = 1
|
| 33 |
+
WED = 2
|
| 34 |
+
THU = 3
|
| 35 |
+
FRI = 4
|
| 36 |
+
SAT = 5
|
| 37 |
+
SUN = 6
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_day.py
ADDED
|
@@ -0,0 +1,236 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for offsets.BDay
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
from datetime import (
|
| 7 |
+
date,
|
| 8 |
+
datetime,
|
| 9 |
+
timedelta,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
import numpy as np
|
| 13 |
+
import pytest
|
| 14 |
+
|
| 15 |
+
from pandas._libs.tslibs.offsets import (
|
| 16 |
+
ApplyTypeError,
|
| 17 |
+
BDay,
|
| 18 |
+
BMonthEnd,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
from pandas import (
|
| 22 |
+
DatetimeIndex,
|
| 23 |
+
Timedelta,
|
| 24 |
+
_testing as tm,
|
| 25 |
+
)
|
| 26 |
+
from pandas.tests.tseries.offsets.common import (
|
| 27 |
+
assert_is_on_offset,
|
| 28 |
+
assert_offset_equal,
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
from pandas.tseries import offsets
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@pytest.fixture
|
| 35 |
+
def dt():
|
| 36 |
+
return datetime(2008, 1, 1)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@pytest.fixture
|
| 40 |
+
def _offset():
|
| 41 |
+
return BDay
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@pytest.fixture
|
| 45 |
+
def offset(_offset):
|
| 46 |
+
return _offset()
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
@pytest.fixture
|
| 50 |
+
def offset2(_offset):
|
| 51 |
+
return _offset(2)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class TestBusinessDay:
|
| 55 |
+
def test_different_normalize_equals(self, _offset, offset2):
|
| 56 |
+
# GH#21404 changed __eq__ to return False when `normalize` does not match
|
| 57 |
+
offset = _offset()
|
| 58 |
+
offset2 = _offset(normalize=True)
|
| 59 |
+
assert offset != offset2
|
| 60 |
+
|
| 61 |
+
def test_repr(self, offset, offset2):
|
| 62 |
+
assert repr(offset) == "<BusinessDay>"
|
| 63 |
+
assert repr(offset2) == "<2 * BusinessDays>"
|
| 64 |
+
|
| 65 |
+
expected = "<BusinessDay: offset=datetime.timedelta(days=1)>"
|
| 66 |
+
assert repr(offset + timedelta(1)) == expected
|
| 67 |
+
|
| 68 |
+
def test_with_offset(self, dt, offset):
|
| 69 |
+
offset = offset + timedelta(hours=2)
|
| 70 |
+
|
| 71 |
+
assert (dt + offset) == datetime(2008, 1, 2, 2)
|
| 72 |
+
|
| 73 |
+
@pytest.mark.parametrize(
|
| 74 |
+
"td",
|
| 75 |
+
[
|
| 76 |
+
Timedelta(hours=2),
|
| 77 |
+
Timedelta(hours=2).to_pytimedelta(),
|
| 78 |
+
Timedelta(hours=2).to_timedelta64(),
|
| 79 |
+
],
|
| 80 |
+
ids=lambda x: type(x),
|
| 81 |
+
)
|
| 82 |
+
def test_with_offset_index(self, td, dt, offset):
|
| 83 |
+
dti = DatetimeIndex([dt])
|
| 84 |
+
expected = DatetimeIndex([datetime(2008, 1, 2, 2)])
|
| 85 |
+
|
| 86 |
+
result = dti + (td + offset)
|
| 87 |
+
tm.assert_index_equal(result, expected)
|
| 88 |
+
|
| 89 |
+
result = dti + (offset + td)
|
| 90 |
+
tm.assert_index_equal(result, expected)
|
| 91 |
+
|
| 92 |
+
def test_eq(self, offset2):
|
| 93 |
+
assert offset2 == offset2
|
| 94 |
+
|
| 95 |
+
def test_hash(self, offset2):
|
| 96 |
+
assert hash(offset2) == hash(offset2)
|
| 97 |
+
|
| 98 |
+
def test_add_datetime(self, dt, offset2):
|
| 99 |
+
assert offset2 + dt == datetime(2008, 1, 3)
|
| 100 |
+
assert offset2 + np.datetime64("2008-01-01 00:00:00") == datetime(2008, 1, 3)
|
| 101 |
+
|
| 102 |
+
def testRollback1(self, dt, _offset):
|
| 103 |
+
assert _offset(10).rollback(dt) == dt
|
| 104 |
+
|
| 105 |
+
def testRollback2(self, _offset):
|
| 106 |
+
assert _offset(10).rollback(datetime(2008, 1, 5)) == datetime(2008, 1, 4)
|
| 107 |
+
|
| 108 |
+
def testRollforward1(self, dt, _offset):
|
| 109 |
+
assert _offset(10).rollforward(dt) == dt
|
| 110 |
+
|
| 111 |
+
def testRollforward2(self, _offset):
|
| 112 |
+
assert _offset(10).rollforward(datetime(2008, 1, 5)) == datetime(2008, 1, 7)
|
| 113 |
+
|
| 114 |
+
def test_roll_date_object(self, offset):
|
| 115 |
+
dt = date(2012, 9, 15)
|
| 116 |
+
|
| 117 |
+
result = offset.rollback(dt)
|
| 118 |
+
assert result == datetime(2012, 9, 14)
|
| 119 |
+
|
| 120 |
+
result = offset.rollforward(dt)
|
| 121 |
+
assert result == datetime(2012, 9, 17)
|
| 122 |
+
|
| 123 |
+
offset = offsets.Day()
|
| 124 |
+
result = offset.rollback(dt)
|
| 125 |
+
assert result == datetime(2012, 9, 15)
|
| 126 |
+
|
| 127 |
+
result = offset.rollforward(dt)
|
| 128 |
+
assert result == datetime(2012, 9, 15)
|
| 129 |
+
|
| 130 |
+
@pytest.mark.parametrize(
|
| 131 |
+
"dt, expected",
|
| 132 |
+
[
|
| 133 |
+
(datetime(2008, 1, 1), True),
|
| 134 |
+
(datetime(2008, 1, 5), False),
|
| 135 |
+
],
|
| 136 |
+
)
|
| 137 |
+
def test_is_on_offset(self, offset, dt, expected):
|
| 138 |
+
assert_is_on_offset(offset, dt, expected)
|
| 139 |
+
|
| 140 |
+
apply_cases: list[tuple[int, dict[datetime, datetime]]] = [
|
| 141 |
+
(
|
| 142 |
+
1,
|
| 143 |
+
{
|
| 144 |
+
datetime(2008, 1, 1): datetime(2008, 1, 2),
|
| 145 |
+
datetime(2008, 1, 4): datetime(2008, 1, 7),
|
| 146 |
+
datetime(2008, 1, 5): datetime(2008, 1, 7),
|
| 147 |
+
datetime(2008, 1, 6): datetime(2008, 1, 7),
|
| 148 |
+
datetime(2008, 1, 7): datetime(2008, 1, 8),
|
| 149 |
+
},
|
| 150 |
+
),
|
| 151 |
+
(
|
| 152 |
+
2,
|
| 153 |
+
{
|
| 154 |
+
datetime(2008, 1, 1): datetime(2008, 1, 3),
|
| 155 |
+
datetime(2008, 1, 4): datetime(2008, 1, 8),
|
| 156 |
+
datetime(2008, 1, 5): datetime(2008, 1, 8),
|
| 157 |
+
datetime(2008, 1, 6): datetime(2008, 1, 8),
|
| 158 |
+
datetime(2008, 1, 7): datetime(2008, 1, 9),
|
| 159 |
+
},
|
| 160 |
+
),
|
| 161 |
+
(
|
| 162 |
+
-1,
|
| 163 |
+
{
|
| 164 |
+
datetime(2008, 1, 1): datetime(2007, 12, 31),
|
| 165 |
+
datetime(2008, 1, 4): datetime(2008, 1, 3),
|
| 166 |
+
datetime(2008, 1, 5): datetime(2008, 1, 4),
|
| 167 |
+
datetime(2008, 1, 6): datetime(2008, 1, 4),
|
| 168 |
+
datetime(2008, 1, 7): datetime(2008, 1, 4),
|
| 169 |
+
datetime(2008, 1, 8): datetime(2008, 1, 7),
|
| 170 |
+
},
|
| 171 |
+
),
|
| 172 |
+
(
|
| 173 |
+
-2,
|
| 174 |
+
{
|
| 175 |
+
datetime(2008, 1, 1): datetime(2007, 12, 28),
|
| 176 |
+
datetime(2008, 1, 4): datetime(2008, 1, 2),
|
| 177 |
+
datetime(2008, 1, 5): datetime(2008, 1, 3),
|
| 178 |
+
datetime(2008, 1, 6): datetime(2008, 1, 3),
|
| 179 |
+
datetime(2008, 1, 7): datetime(2008, 1, 3),
|
| 180 |
+
datetime(2008, 1, 8): datetime(2008, 1, 4),
|
| 181 |
+
datetime(2008, 1, 9): datetime(2008, 1, 7),
|
| 182 |
+
},
|
| 183 |
+
),
|
| 184 |
+
(
|
| 185 |
+
0,
|
| 186 |
+
{
|
| 187 |
+
datetime(2008, 1, 1): datetime(2008, 1, 1),
|
| 188 |
+
datetime(2008, 1, 4): datetime(2008, 1, 4),
|
| 189 |
+
datetime(2008, 1, 5): datetime(2008, 1, 7),
|
| 190 |
+
datetime(2008, 1, 6): datetime(2008, 1, 7),
|
| 191 |
+
datetime(2008, 1, 7): datetime(2008, 1, 7),
|
| 192 |
+
},
|
| 193 |
+
),
|
| 194 |
+
]
|
| 195 |
+
|
| 196 |
+
@pytest.mark.parametrize("case", apply_cases)
|
| 197 |
+
def test_apply(self, case, _offset):
|
| 198 |
+
n, cases = case
|
| 199 |
+
offset = _offset(n)
|
| 200 |
+
for base, expected in cases.items():
|
| 201 |
+
assert_offset_equal(offset, base, expected)
|
| 202 |
+
|
| 203 |
+
def test_apply_large_n(self, _offset):
|
| 204 |
+
dt = datetime(2012, 10, 23)
|
| 205 |
+
|
| 206 |
+
result = dt + _offset(10)
|
| 207 |
+
assert result == datetime(2012, 11, 6)
|
| 208 |
+
|
| 209 |
+
result = dt + _offset(100) - _offset(100)
|
| 210 |
+
assert result == dt
|
| 211 |
+
|
| 212 |
+
off = _offset() * 6
|
| 213 |
+
rs = datetime(2012, 1, 1) - off
|
| 214 |
+
xp = datetime(2011, 12, 23)
|
| 215 |
+
assert rs == xp
|
| 216 |
+
|
| 217 |
+
st = datetime(2011, 12, 18)
|
| 218 |
+
rs = st + off
|
| 219 |
+
xp = datetime(2011, 12, 26)
|
| 220 |
+
assert rs == xp
|
| 221 |
+
|
| 222 |
+
off = _offset() * 10
|
| 223 |
+
rs = datetime(2014, 1, 5) + off # see #5890
|
| 224 |
+
xp = datetime(2014, 1, 17)
|
| 225 |
+
assert rs == xp
|
| 226 |
+
|
| 227 |
+
def test_apply_corner(self, _offset):
|
| 228 |
+
if _offset is BDay:
|
| 229 |
+
msg = "Only know how to combine business day with datetime or timedelta"
|
| 230 |
+
else:
|
| 231 |
+
msg = (
|
| 232 |
+
"Only know how to combine trading day "
|
| 233 |
+
"with datetime, datetime64 or timedelta"
|
| 234 |
+
)
|
| 235 |
+
with pytest.raises(ApplyTypeError, match=msg):
|
| 236 |
+
_offset()._apply(BMonthEnd())
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_hour.py
ADDED
|
@@ -0,0 +1,1445 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for offsets.BusinessHour
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
from datetime import (
|
| 7 |
+
datetime,
|
| 8 |
+
time as dt_time,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
import pytest
|
| 12 |
+
|
| 13 |
+
from pandas._libs.tslibs import (
|
| 14 |
+
Timedelta,
|
| 15 |
+
Timestamp,
|
| 16 |
+
)
|
| 17 |
+
from pandas._libs.tslibs.offsets import (
|
| 18 |
+
BDay,
|
| 19 |
+
BusinessHour,
|
| 20 |
+
Nano,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from pandas import (
|
| 24 |
+
DatetimeIndex,
|
| 25 |
+
_testing as tm,
|
| 26 |
+
date_range,
|
| 27 |
+
)
|
| 28 |
+
from pandas.tests.tseries.offsets.common import assert_offset_equal
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@pytest.fixture
|
| 32 |
+
def dt():
|
| 33 |
+
return datetime(2014, 7, 1, 10, 00)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
@pytest.fixture
|
| 37 |
+
def _offset():
|
| 38 |
+
return BusinessHour
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@pytest.fixture
|
| 42 |
+
def offset1():
|
| 43 |
+
return BusinessHour()
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
@pytest.fixture
|
| 47 |
+
def offset2():
|
| 48 |
+
return BusinessHour(n=3)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
@pytest.fixture
|
| 52 |
+
def offset3():
|
| 53 |
+
return BusinessHour(n=-1)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@pytest.fixture
|
| 57 |
+
def offset4():
|
| 58 |
+
return BusinessHour(n=-4)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@pytest.fixture
|
| 62 |
+
def offset5():
|
| 63 |
+
return BusinessHour(start=dt_time(11, 0), end=dt_time(14, 30))
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@pytest.fixture
|
| 67 |
+
def offset6():
|
| 68 |
+
return BusinessHour(start="20:00", end="05:00")
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
@pytest.fixture
|
| 72 |
+
def offset7():
|
| 73 |
+
return BusinessHour(n=-2, start=dt_time(21, 30), end=dt_time(6, 30))
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
@pytest.fixture
|
| 77 |
+
def offset8():
|
| 78 |
+
return BusinessHour(start=["09:00", "13:00"], end=["12:00", "17:00"])
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@pytest.fixture
|
| 82 |
+
def offset9():
|
| 83 |
+
return BusinessHour(n=3, start=["09:00", "22:00"], end=["13:00", "03:00"])
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
@pytest.fixture
|
| 87 |
+
def offset10():
|
| 88 |
+
return BusinessHour(n=-1, start=["23:00", "13:00"], end=["02:00", "17:00"])
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class TestBusinessHour:
|
| 92 |
+
@pytest.mark.parametrize(
|
| 93 |
+
"start,end,match",
|
| 94 |
+
[
|
| 95 |
+
(
|
| 96 |
+
dt_time(11, 0, 5),
|
| 97 |
+
"17:00",
|
| 98 |
+
"time data must be specified only with hour and minute",
|
| 99 |
+
),
|
| 100 |
+
("AAA", "17:00", "time data must match '%H:%M' format"),
|
| 101 |
+
("14:00:05", "17:00", "time data must match '%H:%M' format"),
|
| 102 |
+
([], "17:00", "Must include at least 1 start time"),
|
| 103 |
+
("09:00", [], "Must include at least 1 end time"),
|
| 104 |
+
(
|
| 105 |
+
["09:00", "11:00"],
|
| 106 |
+
"17:00",
|
| 107 |
+
"number of starting time and ending time must be the same",
|
| 108 |
+
),
|
| 109 |
+
(
|
| 110 |
+
["09:00", "11:00"],
|
| 111 |
+
["10:00"],
|
| 112 |
+
"number of starting time and ending time must be the same",
|
| 113 |
+
),
|
| 114 |
+
(
|
| 115 |
+
["09:00", "11:00"],
|
| 116 |
+
["12:00", "20:00"],
|
| 117 |
+
r"invalid starting and ending time\(s\): opening hours should not "
|
| 118 |
+
"touch or overlap with one another",
|
| 119 |
+
),
|
| 120 |
+
(
|
| 121 |
+
["12:00", "20:00"],
|
| 122 |
+
["09:00", "11:00"],
|
| 123 |
+
r"invalid starting and ending time\(s\): opening hours should not "
|
| 124 |
+
"touch or overlap with one another",
|
| 125 |
+
),
|
| 126 |
+
],
|
| 127 |
+
)
|
| 128 |
+
def test_constructor_errors(self, start, end, match):
|
| 129 |
+
with pytest.raises(ValueError, match=match):
|
| 130 |
+
BusinessHour(start=start, end=end)
|
| 131 |
+
|
| 132 |
+
def test_different_normalize_equals(self, _offset):
|
| 133 |
+
# GH#21404 changed __eq__ to return False when `normalize` does not match
|
| 134 |
+
offset = _offset()
|
| 135 |
+
offset2 = _offset(normalize=True)
|
| 136 |
+
assert offset != offset2
|
| 137 |
+
|
| 138 |
+
def test_repr(
|
| 139 |
+
self,
|
| 140 |
+
offset1,
|
| 141 |
+
offset2,
|
| 142 |
+
offset3,
|
| 143 |
+
offset4,
|
| 144 |
+
offset5,
|
| 145 |
+
offset6,
|
| 146 |
+
offset7,
|
| 147 |
+
offset8,
|
| 148 |
+
offset9,
|
| 149 |
+
offset10,
|
| 150 |
+
):
|
| 151 |
+
assert repr(offset1) == "<BusinessHour: bh=09:00-17:00>"
|
| 152 |
+
assert repr(offset2) == "<3 * BusinessHours: bh=09:00-17:00>"
|
| 153 |
+
assert repr(offset3) == "<-1 * BusinessHour: bh=09:00-17:00>"
|
| 154 |
+
assert repr(offset4) == "<-4 * BusinessHours: bh=09:00-17:00>"
|
| 155 |
+
|
| 156 |
+
assert repr(offset5) == "<BusinessHour: bh=11:00-14:30>"
|
| 157 |
+
assert repr(offset6) == "<BusinessHour: bh=20:00-05:00>"
|
| 158 |
+
assert repr(offset7) == "<-2 * BusinessHours: bh=21:30-06:30>"
|
| 159 |
+
assert repr(offset8) == "<BusinessHour: bh=09:00-12:00,13:00-17:00>"
|
| 160 |
+
assert repr(offset9) == "<3 * BusinessHours: bh=09:00-13:00,22:00-03:00>"
|
| 161 |
+
assert repr(offset10) == "<-1 * BusinessHour: bh=13:00-17:00,23:00-02:00>"
|
| 162 |
+
|
| 163 |
+
def test_with_offset(self, dt):
|
| 164 |
+
expected = Timestamp("2014-07-01 13:00")
|
| 165 |
+
|
| 166 |
+
assert dt + BusinessHour() * 3 == expected
|
| 167 |
+
assert dt + BusinessHour(n=3) == expected
|
| 168 |
+
|
| 169 |
+
@pytest.mark.parametrize(
|
| 170 |
+
"offset_name",
|
| 171 |
+
["offset1", "offset2", "offset3", "offset4", "offset8", "offset9", "offset10"],
|
| 172 |
+
)
|
| 173 |
+
def test_eq_attribute(self, offset_name, request):
|
| 174 |
+
offset = request.getfixturevalue(offset_name)
|
| 175 |
+
assert offset == offset
|
| 176 |
+
|
| 177 |
+
@pytest.mark.parametrize(
|
| 178 |
+
"offset1,offset2",
|
| 179 |
+
[
|
| 180 |
+
(BusinessHour(start="09:00"), BusinessHour()),
|
| 181 |
+
(
|
| 182 |
+
BusinessHour(start=["23:00", "13:00"], end=["12:00", "17:00"]),
|
| 183 |
+
BusinessHour(start=["13:00", "23:00"], end=["17:00", "12:00"]),
|
| 184 |
+
),
|
| 185 |
+
],
|
| 186 |
+
)
|
| 187 |
+
def test_eq(self, offset1, offset2):
|
| 188 |
+
assert offset1 == offset2
|
| 189 |
+
|
| 190 |
+
@pytest.mark.parametrize(
|
| 191 |
+
"offset1,offset2",
|
| 192 |
+
[
|
| 193 |
+
(BusinessHour(), BusinessHour(-1)),
|
| 194 |
+
(BusinessHour(start="09:00"), BusinessHour(start="09:01")),
|
| 195 |
+
(
|
| 196 |
+
BusinessHour(start="09:00", end="17:00"),
|
| 197 |
+
BusinessHour(start="17:00", end="09:01"),
|
| 198 |
+
),
|
| 199 |
+
(
|
| 200 |
+
BusinessHour(start=["13:00", "23:00"], end=["18:00", "07:00"]),
|
| 201 |
+
BusinessHour(start=["13:00", "23:00"], end=["17:00", "12:00"]),
|
| 202 |
+
),
|
| 203 |
+
],
|
| 204 |
+
)
|
| 205 |
+
def test_neq(self, offset1, offset2):
|
| 206 |
+
assert offset1 != offset2
|
| 207 |
+
|
| 208 |
+
@pytest.mark.parametrize(
|
| 209 |
+
"offset_name",
|
| 210 |
+
["offset1", "offset2", "offset3", "offset4", "offset8", "offset9", "offset10"],
|
| 211 |
+
)
|
| 212 |
+
def test_hash(self, offset_name, request):
|
| 213 |
+
offset = request.getfixturevalue(offset_name)
|
| 214 |
+
assert offset == offset
|
| 215 |
+
|
| 216 |
+
def test_add_datetime(
|
| 217 |
+
self,
|
| 218 |
+
dt,
|
| 219 |
+
offset1,
|
| 220 |
+
offset2,
|
| 221 |
+
offset3,
|
| 222 |
+
offset4,
|
| 223 |
+
offset8,
|
| 224 |
+
offset9,
|
| 225 |
+
offset10,
|
| 226 |
+
):
|
| 227 |
+
assert offset1 + dt == datetime(2014, 7, 1, 11)
|
| 228 |
+
assert offset2 + dt == datetime(2014, 7, 1, 13)
|
| 229 |
+
assert offset3 + dt == datetime(2014, 6, 30, 17)
|
| 230 |
+
assert offset4 + dt == datetime(2014, 6, 30, 14)
|
| 231 |
+
assert offset8 + dt == datetime(2014, 7, 1, 11)
|
| 232 |
+
assert offset9 + dt == datetime(2014, 7, 1, 22)
|
| 233 |
+
assert offset10 + dt == datetime(2014, 7, 1, 1)
|
| 234 |
+
|
| 235 |
+
def test_sub(self, dt, offset2, _offset):
|
| 236 |
+
off = offset2
|
| 237 |
+
msg = "Cannot subtract datetime from offset"
|
| 238 |
+
with pytest.raises(TypeError, match=msg):
|
| 239 |
+
off - dt
|
| 240 |
+
assert 2 * off - off == off
|
| 241 |
+
|
| 242 |
+
assert dt - offset2 == dt + _offset(-3)
|
| 243 |
+
|
| 244 |
+
def test_multiply_by_zero(self, dt, offset1, offset2):
|
| 245 |
+
assert dt - 0 * offset1 == dt
|
| 246 |
+
assert dt + 0 * offset1 == dt
|
| 247 |
+
assert dt - 0 * offset2 == dt
|
| 248 |
+
assert dt + 0 * offset2 == dt
|
| 249 |
+
|
| 250 |
+
def testRollback1(
|
| 251 |
+
self,
|
| 252 |
+
dt,
|
| 253 |
+
_offset,
|
| 254 |
+
offset1,
|
| 255 |
+
offset2,
|
| 256 |
+
offset3,
|
| 257 |
+
offset4,
|
| 258 |
+
offset5,
|
| 259 |
+
offset6,
|
| 260 |
+
offset7,
|
| 261 |
+
offset8,
|
| 262 |
+
offset9,
|
| 263 |
+
offset10,
|
| 264 |
+
):
|
| 265 |
+
assert offset1.rollback(dt) == dt
|
| 266 |
+
assert offset2.rollback(dt) == dt
|
| 267 |
+
assert offset3.rollback(dt) == dt
|
| 268 |
+
assert offset4.rollback(dt) == dt
|
| 269 |
+
assert offset5.rollback(dt) == datetime(2014, 6, 30, 14, 30)
|
| 270 |
+
assert offset6.rollback(dt) == datetime(2014, 7, 1, 5, 0)
|
| 271 |
+
assert offset7.rollback(dt) == datetime(2014, 7, 1, 6, 30)
|
| 272 |
+
assert offset8.rollback(dt) == dt
|
| 273 |
+
assert offset9.rollback(dt) == dt
|
| 274 |
+
assert offset10.rollback(dt) == datetime(2014, 7, 1, 2)
|
| 275 |
+
|
| 276 |
+
datet = datetime(2014, 7, 1, 0)
|
| 277 |
+
assert offset1.rollback(datet) == datetime(2014, 6, 30, 17)
|
| 278 |
+
assert offset2.rollback(datet) == datetime(2014, 6, 30, 17)
|
| 279 |
+
assert offset3.rollback(datet) == datetime(2014, 6, 30, 17)
|
| 280 |
+
assert offset4.rollback(datet) == datetime(2014, 6, 30, 17)
|
| 281 |
+
assert offset5.rollback(datet) == datetime(2014, 6, 30, 14, 30)
|
| 282 |
+
assert offset6.rollback(datet) == datet
|
| 283 |
+
assert offset7.rollback(datet) == datet
|
| 284 |
+
assert offset8.rollback(datet) == datetime(2014, 6, 30, 17)
|
| 285 |
+
assert offset9.rollback(datet) == datet
|
| 286 |
+
assert offset10.rollback(datet) == datet
|
| 287 |
+
|
| 288 |
+
assert _offset(5).rollback(dt) == dt
|
| 289 |
+
|
| 290 |
+
def testRollback2(self, _offset):
|
| 291 |
+
assert _offset(-3).rollback(datetime(2014, 7, 5, 15, 0)) == datetime(
|
| 292 |
+
2014, 7, 4, 17, 0
|
| 293 |
+
)
|
| 294 |
+
|
| 295 |
+
def testRollforward1(
|
| 296 |
+
self,
|
| 297 |
+
dt,
|
| 298 |
+
_offset,
|
| 299 |
+
offset1,
|
| 300 |
+
offset2,
|
| 301 |
+
offset3,
|
| 302 |
+
offset4,
|
| 303 |
+
offset5,
|
| 304 |
+
offset6,
|
| 305 |
+
offset7,
|
| 306 |
+
offset8,
|
| 307 |
+
offset9,
|
| 308 |
+
offset10,
|
| 309 |
+
):
|
| 310 |
+
assert offset1.rollforward(dt) == dt
|
| 311 |
+
assert offset2.rollforward(dt) == dt
|
| 312 |
+
assert offset3.rollforward(dt) == dt
|
| 313 |
+
assert offset4.rollforward(dt) == dt
|
| 314 |
+
assert offset5.rollforward(dt) == datetime(2014, 7, 1, 11, 0)
|
| 315 |
+
assert offset6.rollforward(dt) == datetime(2014, 7, 1, 20, 0)
|
| 316 |
+
assert offset7.rollforward(dt) == datetime(2014, 7, 1, 21, 30)
|
| 317 |
+
assert offset8.rollforward(dt) == dt
|
| 318 |
+
assert offset9.rollforward(dt) == dt
|
| 319 |
+
assert offset10.rollforward(dt) == datetime(2014, 7, 1, 13)
|
| 320 |
+
|
| 321 |
+
datet = datetime(2014, 7, 1, 0)
|
| 322 |
+
assert offset1.rollforward(datet) == datetime(2014, 7, 1, 9)
|
| 323 |
+
assert offset2.rollforward(datet) == datetime(2014, 7, 1, 9)
|
| 324 |
+
assert offset3.rollforward(datet) == datetime(2014, 7, 1, 9)
|
| 325 |
+
assert offset4.rollforward(datet) == datetime(2014, 7, 1, 9)
|
| 326 |
+
assert offset5.rollforward(datet) == datetime(2014, 7, 1, 11)
|
| 327 |
+
assert offset6.rollforward(datet) == datet
|
| 328 |
+
assert offset7.rollforward(datet) == datet
|
| 329 |
+
assert offset8.rollforward(datet) == datetime(2014, 7, 1, 9)
|
| 330 |
+
assert offset9.rollforward(datet) == datet
|
| 331 |
+
assert offset10.rollforward(datet) == datet
|
| 332 |
+
|
| 333 |
+
assert _offset(5).rollforward(dt) == dt
|
| 334 |
+
|
| 335 |
+
def testRollforward2(self, _offset):
|
| 336 |
+
assert _offset(-3).rollforward(datetime(2014, 7, 5, 16, 0)) == datetime(
|
| 337 |
+
2014, 7, 7, 9
|
| 338 |
+
)
|
| 339 |
+
|
| 340 |
+
def test_roll_date_object(self):
|
| 341 |
+
offset = BusinessHour()
|
| 342 |
+
|
| 343 |
+
dt = datetime(2014, 7, 6, 15, 0)
|
| 344 |
+
|
| 345 |
+
result = offset.rollback(dt)
|
| 346 |
+
assert result == datetime(2014, 7, 4, 17)
|
| 347 |
+
|
| 348 |
+
result = offset.rollforward(dt)
|
| 349 |
+
assert result == datetime(2014, 7, 7, 9)
|
| 350 |
+
|
| 351 |
+
normalize_cases = []
|
| 352 |
+
normalize_cases.append(
|
| 353 |
+
(
|
| 354 |
+
BusinessHour(normalize=True),
|
| 355 |
+
{
|
| 356 |
+
datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
|
| 357 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 2),
|
| 358 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 2),
|
| 359 |
+
datetime(2014, 7, 1, 23): datetime(2014, 7, 2),
|
| 360 |
+
datetime(2014, 7, 1, 0): datetime(2014, 7, 1),
|
| 361 |
+
datetime(2014, 7, 4, 15): datetime(2014, 7, 4),
|
| 362 |
+
datetime(2014, 7, 4, 15, 59): datetime(2014, 7, 4),
|
| 363 |
+
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7),
|
| 364 |
+
datetime(2014, 7, 5, 23): datetime(2014, 7, 7),
|
| 365 |
+
datetime(2014, 7, 6, 10): datetime(2014, 7, 7),
|
| 366 |
+
},
|
| 367 |
+
)
|
| 368 |
+
)
|
| 369 |
+
|
| 370 |
+
normalize_cases.append(
|
| 371 |
+
(
|
| 372 |
+
BusinessHour(-1, normalize=True),
|
| 373 |
+
{
|
| 374 |
+
datetime(2014, 7, 1, 8): datetime(2014, 6, 30),
|
| 375 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
|
| 376 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 1),
|
| 377 |
+
datetime(2014, 7, 1, 10): datetime(2014, 6, 30),
|
| 378 |
+
datetime(2014, 7, 1, 0): datetime(2014, 6, 30),
|
| 379 |
+
datetime(2014, 7, 7, 10): datetime(2014, 7, 4),
|
| 380 |
+
datetime(2014, 7, 7, 10, 1): datetime(2014, 7, 7),
|
| 381 |
+
datetime(2014, 7, 5, 23): datetime(2014, 7, 4),
|
| 382 |
+
datetime(2014, 7, 6, 10): datetime(2014, 7, 4),
|
| 383 |
+
},
|
| 384 |
+
)
|
| 385 |
+
)
|
| 386 |
+
|
| 387 |
+
normalize_cases.append(
|
| 388 |
+
(
|
| 389 |
+
BusinessHour(1, normalize=True, start="17:00", end="04:00"),
|
| 390 |
+
{
|
| 391 |
+
datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
|
| 392 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
|
| 393 |
+
datetime(2014, 7, 1, 23): datetime(2014, 7, 2),
|
| 394 |
+
datetime(2014, 7, 2, 2): datetime(2014, 7, 2),
|
| 395 |
+
datetime(2014, 7, 2, 3): datetime(2014, 7, 2),
|
| 396 |
+
datetime(2014, 7, 4, 23): datetime(2014, 7, 5),
|
| 397 |
+
datetime(2014, 7, 5, 2): datetime(2014, 7, 5),
|
| 398 |
+
datetime(2014, 7, 7, 2): datetime(2014, 7, 7),
|
| 399 |
+
datetime(2014, 7, 7, 17): datetime(2014, 7, 7),
|
| 400 |
+
},
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
|
| 404 |
+
@pytest.mark.parametrize("case", normalize_cases)
|
| 405 |
+
def test_normalize(self, case):
|
| 406 |
+
offset, cases = case
|
| 407 |
+
for dt, expected in cases.items():
|
| 408 |
+
assert offset._apply(dt) == expected
|
| 409 |
+
|
| 410 |
+
on_offset_cases = []
|
| 411 |
+
on_offset_cases.append(
|
| 412 |
+
(
|
| 413 |
+
BusinessHour(),
|
| 414 |
+
{
|
| 415 |
+
datetime(2014, 7, 1, 9): True,
|
| 416 |
+
datetime(2014, 7, 1, 8, 59): False,
|
| 417 |
+
datetime(2014, 7, 1, 8): False,
|
| 418 |
+
datetime(2014, 7, 1, 17): True,
|
| 419 |
+
datetime(2014, 7, 1, 17, 1): False,
|
| 420 |
+
datetime(2014, 7, 1, 18): False,
|
| 421 |
+
datetime(2014, 7, 5, 9): False,
|
| 422 |
+
datetime(2014, 7, 6, 12): False,
|
| 423 |
+
},
|
| 424 |
+
)
|
| 425 |
+
)
|
| 426 |
+
|
| 427 |
+
on_offset_cases.append(
|
| 428 |
+
(
|
| 429 |
+
BusinessHour(start="10:00", end="15:00"),
|
| 430 |
+
{
|
| 431 |
+
datetime(2014, 7, 1, 9): False,
|
| 432 |
+
datetime(2014, 7, 1, 10): True,
|
| 433 |
+
datetime(2014, 7, 1, 15): True,
|
| 434 |
+
datetime(2014, 7, 1, 15, 1): False,
|
| 435 |
+
datetime(2014, 7, 5, 12): False,
|
| 436 |
+
datetime(2014, 7, 6, 12): False,
|
| 437 |
+
},
|
| 438 |
+
)
|
| 439 |
+
)
|
| 440 |
+
|
| 441 |
+
on_offset_cases.append(
|
| 442 |
+
(
|
| 443 |
+
BusinessHour(start="19:00", end="05:00"),
|
| 444 |
+
{
|
| 445 |
+
datetime(2014, 7, 1, 9, 0): False,
|
| 446 |
+
datetime(2014, 7, 1, 10, 0): False,
|
| 447 |
+
datetime(2014, 7, 1, 15): False,
|
| 448 |
+
datetime(2014, 7, 1, 15, 1): False,
|
| 449 |
+
datetime(2014, 7, 5, 12, 0): False,
|
| 450 |
+
datetime(2014, 7, 6, 12, 0): False,
|
| 451 |
+
datetime(2014, 7, 1, 19, 0): True,
|
| 452 |
+
datetime(2014, 7, 2, 0, 0): True,
|
| 453 |
+
datetime(2014, 7, 4, 23): True,
|
| 454 |
+
datetime(2014, 7, 5, 1): True,
|
| 455 |
+
datetime(2014, 7, 5, 5, 0): True,
|
| 456 |
+
datetime(2014, 7, 6, 23, 0): False,
|
| 457 |
+
datetime(2014, 7, 7, 3, 0): False,
|
| 458 |
+
},
|
| 459 |
+
)
|
| 460 |
+
)
|
| 461 |
+
|
| 462 |
+
on_offset_cases.append(
|
| 463 |
+
(
|
| 464 |
+
BusinessHour(start=["09:00", "13:00"], end=["12:00", "17:00"]),
|
| 465 |
+
{
|
| 466 |
+
datetime(2014, 7, 1, 9): True,
|
| 467 |
+
datetime(2014, 7, 1, 8, 59): False,
|
| 468 |
+
datetime(2014, 7, 1, 8): False,
|
| 469 |
+
datetime(2014, 7, 1, 17): True,
|
| 470 |
+
datetime(2014, 7, 1, 17, 1): False,
|
| 471 |
+
datetime(2014, 7, 1, 18): False,
|
| 472 |
+
datetime(2014, 7, 5, 9): False,
|
| 473 |
+
datetime(2014, 7, 6, 12): False,
|
| 474 |
+
datetime(2014, 7, 1, 12, 30): False,
|
| 475 |
+
},
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
|
| 479 |
+
on_offset_cases.append(
|
| 480 |
+
(
|
| 481 |
+
BusinessHour(start=["19:00", "23:00"], end=["21:00", "05:00"]),
|
| 482 |
+
{
|
| 483 |
+
datetime(2014, 7, 1, 9, 0): False,
|
| 484 |
+
datetime(2014, 7, 1, 10, 0): False,
|
| 485 |
+
datetime(2014, 7, 1, 15): False,
|
| 486 |
+
datetime(2014, 7, 1, 15, 1): False,
|
| 487 |
+
datetime(2014, 7, 5, 12, 0): False,
|
| 488 |
+
datetime(2014, 7, 6, 12, 0): False,
|
| 489 |
+
datetime(2014, 7, 1, 19, 0): True,
|
| 490 |
+
datetime(2014, 7, 2, 0, 0): True,
|
| 491 |
+
datetime(2014, 7, 4, 23): True,
|
| 492 |
+
datetime(2014, 7, 5, 1): True,
|
| 493 |
+
datetime(2014, 7, 5, 5, 0): True,
|
| 494 |
+
datetime(2014, 7, 6, 23, 0): False,
|
| 495 |
+
datetime(2014, 7, 7, 3, 0): False,
|
| 496 |
+
datetime(2014, 7, 4, 22): False,
|
| 497 |
+
},
|
| 498 |
+
)
|
| 499 |
+
)
|
| 500 |
+
|
| 501 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 502 |
+
def test_is_on_offset(self, case):
|
| 503 |
+
offset, cases = case
|
| 504 |
+
for dt, expected in cases.items():
|
| 505 |
+
assert offset.is_on_offset(dt) == expected
|
| 506 |
+
|
| 507 |
+
apply_cases = [
|
| 508 |
+
(
|
| 509 |
+
BusinessHour(),
|
| 510 |
+
{
|
| 511 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 12),
|
| 512 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 1, 14),
|
| 513 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 1, 16),
|
| 514 |
+
datetime(2014, 7, 1, 19): datetime(2014, 7, 2, 10),
|
| 515 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 2, 9),
|
| 516 |
+
datetime(2014, 7, 1, 16, 30, 15): datetime(2014, 7, 2, 9, 30, 15),
|
| 517 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 10),
|
| 518 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 2, 12),
|
| 519 |
+
# out of business hours
|
| 520 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 10),
|
| 521 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 10),
|
| 522 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 10),
|
| 523 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 10),
|
| 524 |
+
# saturday
|
| 525 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 10),
|
| 526 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 10),
|
| 527 |
+
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 9, 30),
|
| 528 |
+
datetime(2014, 7, 4, 16, 30, 30): datetime(2014, 7, 7, 9, 30, 30),
|
| 529 |
+
},
|
| 530 |
+
),
|
| 531 |
+
(
|
| 532 |
+
BusinessHour(4),
|
| 533 |
+
{
|
| 534 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 15),
|
| 535 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 2, 9),
|
| 536 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 2, 11),
|
| 537 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 2, 12),
|
| 538 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 13),
|
| 539 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 2, 15),
|
| 540 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 13),
|
| 541 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 13),
|
| 542 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 13),
|
| 543 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 13),
|
| 544 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 13),
|
| 545 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 13),
|
| 546 |
+
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 12, 30),
|
| 547 |
+
datetime(2014, 7, 4, 16, 30, 30): datetime(2014, 7, 7, 12, 30, 30),
|
| 548 |
+
},
|
| 549 |
+
),
|
| 550 |
+
(
|
| 551 |
+
BusinessHour(-1),
|
| 552 |
+
{
|
| 553 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 10),
|
| 554 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 1, 12),
|
| 555 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 1, 14),
|
| 556 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 1, 15),
|
| 557 |
+
datetime(2014, 7, 1, 10): datetime(2014, 6, 30, 17),
|
| 558 |
+
datetime(2014, 7, 1, 16, 30, 15): datetime(2014, 7, 1, 15, 30, 15),
|
| 559 |
+
datetime(2014, 7, 1, 9, 30, 15): datetime(2014, 6, 30, 16, 30, 15),
|
| 560 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 16),
|
| 561 |
+
datetime(2014, 7, 1, 5): datetime(2014, 6, 30, 16),
|
| 562 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 2, 10),
|
| 563 |
+
# out of business hours
|
| 564 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 1, 16),
|
| 565 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 16),
|
| 566 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 2, 16),
|
| 567 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 16),
|
| 568 |
+
# saturday
|
| 569 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 16),
|
| 570 |
+
datetime(2014, 7, 7, 9): datetime(2014, 7, 4, 16),
|
| 571 |
+
datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 4, 16, 30),
|
| 572 |
+
datetime(2014, 7, 7, 9, 30, 30): datetime(2014, 7, 4, 16, 30, 30),
|
| 573 |
+
},
|
| 574 |
+
),
|
| 575 |
+
(
|
| 576 |
+
BusinessHour(-4),
|
| 577 |
+
{
|
| 578 |
+
datetime(2014, 7, 1, 11): datetime(2014, 6, 30, 15),
|
| 579 |
+
datetime(2014, 7, 1, 13): datetime(2014, 6, 30, 17),
|
| 580 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 1, 11),
|
| 581 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 1, 12),
|
| 582 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 13),
|
| 583 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 1, 15),
|
| 584 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 1, 13),
|
| 585 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 13),
|
| 586 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 2, 13),
|
| 587 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 13),
|
| 588 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 13),
|
| 589 |
+
datetime(2014, 7, 4, 18): datetime(2014, 7, 4, 13),
|
| 590 |
+
datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 4, 13, 30),
|
| 591 |
+
datetime(2014, 7, 7, 9, 30, 30): datetime(2014, 7, 4, 13, 30, 30),
|
| 592 |
+
},
|
| 593 |
+
),
|
| 594 |
+
(
|
| 595 |
+
BusinessHour(start="13:00", end="16:00"),
|
| 596 |
+
{
|
| 597 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 14),
|
| 598 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 1, 14),
|
| 599 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 2, 13),
|
| 600 |
+
datetime(2014, 7, 1, 19): datetime(2014, 7, 2, 14),
|
| 601 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 2, 14),
|
| 602 |
+
datetime(2014, 7, 1, 15, 30, 15): datetime(2014, 7, 2, 13, 30, 15),
|
| 603 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 14),
|
| 604 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 14),
|
| 605 |
+
},
|
| 606 |
+
),
|
| 607 |
+
(
|
| 608 |
+
BusinessHour(n=2, start="13:00", end="16:00"),
|
| 609 |
+
{
|
| 610 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 15),
|
| 611 |
+
datetime(2014, 7, 2, 14): datetime(2014, 7, 3, 13),
|
| 612 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 15),
|
| 613 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 15),
|
| 614 |
+
datetime(2014, 7, 2, 14, 30): datetime(2014, 7, 3, 13, 30),
|
| 615 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 15),
|
| 616 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 15),
|
| 617 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 15),
|
| 618 |
+
datetime(2014, 7, 4, 14, 30): datetime(2014, 7, 7, 13, 30),
|
| 619 |
+
datetime(2014, 7, 4, 14, 30, 30): datetime(2014, 7, 7, 13, 30, 30),
|
| 620 |
+
},
|
| 621 |
+
),
|
| 622 |
+
(
|
| 623 |
+
BusinessHour(n=-1, start="13:00", end="16:00"),
|
| 624 |
+
{
|
| 625 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 1, 15),
|
| 626 |
+
datetime(2014, 7, 2, 13): datetime(2014, 7, 1, 15),
|
| 627 |
+
datetime(2014, 7, 2, 14): datetime(2014, 7, 1, 16),
|
| 628 |
+
datetime(2014, 7, 2, 15): datetime(2014, 7, 2, 14),
|
| 629 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 15),
|
| 630 |
+
datetime(2014, 7, 2, 16): datetime(2014, 7, 2, 15),
|
| 631 |
+
datetime(2014, 7, 2, 13, 30, 15): datetime(2014, 7, 1, 15, 30, 15),
|
| 632 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 15),
|
| 633 |
+
datetime(2014, 7, 7, 11): datetime(2014, 7, 4, 15),
|
| 634 |
+
},
|
| 635 |
+
),
|
| 636 |
+
(
|
| 637 |
+
BusinessHour(n=-3, start="10:00", end="16:00"),
|
| 638 |
+
{
|
| 639 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 13),
|
| 640 |
+
datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 11),
|
| 641 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 1, 13),
|
| 642 |
+
datetime(2014, 7, 2, 13): datetime(2014, 7, 1, 16),
|
| 643 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 13),
|
| 644 |
+
datetime(2014, 7, 2, 11, 30): datetime(2014, 7, 1, 14, 30),
|
| 645 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 13),
|
| 646 |
+
datetime(2014, 7, 4, 10): datetime(2014, 7, 3, 13),
|
| 647 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 13),
|
| 648 |
+
datetime(2014, 7, 4, 16): datetime(2014, 7, 4, 13),
|
| 649 |
+
datetime(2014, 7, 4, 12, 30): datetime(2014, 7, 3, 15, 30),
|
| 650 |
+
datetime(2014, 7, 4, 12, 30, 30): datetime(2014, 7, 3, 15, 30, 30),
|
| 651 |
+
},
|
| 652 |
+
),
|
| 653 |
+
(
|
| 654 |
+
BusinessHour(start="19:00", end="05:00"),
|
| 655 |
+
{
|
| 656 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 20),
|
| 657 |
+
datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 20),
|
| 658 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 20),
|
| 659 |
+
datetime(2014, 7, 2, 13): datetime(2014, 7, 2, 20),
|
| 660 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 20),
|
| 661 |
+
datetime(2014, 7, 2, 4, 30): datetime(2014, 7, 2, 19, 30),
|
| 662 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 1),
|
| 663 |
+
datetime(2014, 7, 4, 10): datetime(2014, 7, 4, 20),
|
| 664 |
+
datetime(2014, 7, 4, 23): datetime(2014, 7, 5, 0),
|
| 665 |
+
datetime(2014, 7, 5, 0): datetime(2014, 7, 5, 1),
|
| 666 |
+
datetime(2014, 7, 5, 4): datetime(2014, 7, 7, 19),
|
| 667 |
+
datetime(2014, 7, 5, 4, 30): datetime(2014, 7, 7, 19, 30),
|
| 668 |
+
datetime(2014, 7, 5, 4, 30, 30): datetime(2014, 7, 7, 19, 30, 30),
|
| 669 |
+
},
|
| 670 |
+
),
|
| 671 |
+
(
|
| 672 |
+
BusinessHour(n=-1, start="19:00", end="05:00"),
|
| 673 |
+
{
|
| 674 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 4),
|
| 675 |
+
datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 4),
|
| 676 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 4),
|
| 677 |
+
datetime(2014, 7, 2, 13): datetime(2014, 7, 2, 4),
|
| 678 |
+
datetime(2014, 7, 2, 20): datetime(2014, 7, 2, 5),
|
| 679 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 4),
|
| 680 |
+
datetime(2014, 7, 2, 19, 30): datetime(2014, 7, 2, 4, 30),
|
| 681 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 23),
|
| 682 |
+
datetime(2014, 7, 3, 6): datetime(2014, 7, 3, 4),
|
| 683 |
+
datetime(2014, 7, 4, 23): datetime(2014, 7, 4, 22),
|
| 684 |
+
datetime(2014, 7, 5, 0): datetime(2014, 7, 4, 23),
|
| 685 |
+
datetime(2014, 7, 5, 4): datetime(2014, 7, 5, 3),
|
| 686 |
+
datetime(2014, 7, 7, 19, 30): datetime(2014, 7, 5, 4, 30),
|
| 687 |
+
datetime(2014, 7, 7, 19, 30, 30): datetime(2014, 7, 5, 4, 30, 30),
|
| 688 |
+
},
|
| 689 |
+
),
|
| 690 |
+
(
|
| 691 |
+
BusinessHour(n=4, start="00:00", end="23:00"),
|
| 692 |
+
{
|
| 693 |
+
datetime(2014, 7, 3, 22): datetime(2014, 7, 4, 3),
|
| 694 |
+
datetime(2014, 7, 4, 22): datetime(2014, 7, 7, 3),
|
| 695 |
+
datetime(2014, 7, 3, 22, 30): datetime(2014, 7, 4, 3, 30),
|
| 696 |
+
datetime(2014, 7, 3, 22, 20): datetime(2014, 7, 4, 3, 20),
|
| 697 |
+
datetime(2014, 7, 4, 22, 30, 30): datetime(2014, 7, 7, 3, 30, 30),
|
| 698 |
+
datetime(2014, 7, 4, 22, 30, 20): datetime(2014, 7, 7, 3, 30, 20),
|
| 699 |
+
},
|
| 700 |
+
),
|
| 701 |
+
(
|
| 702 |
+
BusinessHour(n=-4, start="00:00", end="23:00"),
|
| 703 |
+
{
|
| 704 |
+
datetime(2014, 7, 4, 3): datetime(2014, 7, 3, 22),
|
| 705 |
+
datetime(2014, 7, 7, 3): datetime(2014, 7, 4, 22),
|
| 706 |
+
datetime(2014, 7, 4, 3, 30): datetime(2014, 7, 3, 22, 30),
|
| 707 |
+
datetime(2014, 7, 4, 3, 20): datetime(2014, 7, 3, 22, 20),
|
| 708 |
+
datetime(2014, 7, 7, 3, 30, 30): datetime(2014, 7, 4, 22, 30, 30),
|
| 709 |
+
datetime(2014, 7, 7, 3, 30, 20): datetime(2014, 7, 4, 22, 30, 20),
|
| 710 |
+
},
|
| 711 |
+
),
|
| 712 |
+
(
|
| 713 |
+
BusinessHour(start=["09:00", "14:00"], end=["12:00", "18:00"]),
|
| 714 |
+
{
|
| 715 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 14),
|
| 716 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 1, 16),
|
| 717 |
+
datetime(2014, 7, 1, 19): datetime(2014, 7, 2, 10),
|
| 718 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 1, 17),
|
| 719 |
+
datetime(2014, 7, 1, 16, 30, 15): datetime(2014, 7, 1, 17, 30, 15),
|
| 720 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 9),
|
| 721 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 2, 14),
|
| 722 |
+
# out of business hours
|
| 723 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 1, 15),
|
| 724 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 10),
|
| 725 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 10),
|
| 726 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 10),
|
| 727 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 10),
|
| 728 |
+
# saturday
|
| 729 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 10),
|
| 730 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 9),
|
| 731 |
+
datetime(2014, 7, 4, 17, 30): datetime(2014, 7, 7, 9, 30),
|
| 732 |
+
datetime(2014, 7, 4, 17, 30, 30): datetime(2014, 7, 7, 9, 30, 30),
|
| 733 |
+
},
|
| 734 |
+
),
|
| 735 |
+
(
|
| 736 |
+
BusinessHour(n=4, start=["09:00", "14:00"], end=["12:00", "18:00"]),
|
| 737 |
+
{
|
| 738 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 17),
|
| 739 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 2, 9),
|
| 740 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 2, 10),
|
| 741 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 2, 11),
|
| 742 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 14),
|
| 743 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 2, 17),
|
| 744 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 15),
|
| 745 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 15),
|
| 746 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 15),
|
| 747 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 15),
|
| 748 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 15),
|
| 749 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 14),
|
| 750 |
+
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 11, 30),
|
| 751 |
+
datetime(2014, 7, 4, 16, 30, 30): datetime(2014, 7, 7, 11, 30, 30),
|
| 752 |
+
},
|
| 753 |
+
),
|
| 754 |
+
(
|
| 755 |
+
BusinessHour(n=-4, start=["09:00", "14:00"], end=["12:00", "18:00"]),
|
| 756 |
+
{
|
| 757 |
+
datetime(2014, 7, 1, 11): datetime(2014, 6, 30, 16),
|
| 758 |
+
datetime(2014, 7, 1, 13): datetime(2014, 6, 30, 17),
|
| 759 |
+
datetime(2014, 7, 1, 15): datetime(2014, 6, 30, 18),
|
| 760 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 1, 10),
|
| 761 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 11),
|
| 762 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 1, 16),
|
| 763 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 1, 12),
|
| 764 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 12),
|
| 765 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 2, 12),
|
| 766 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 12),
|
| 767 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 12),
|
| 768 |
+
datetime(2014, 7, 4, 18): datetime(2014, 7, 4, 12),
|
| 769 |
+
datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 4, 14, 30),
|
| 770 |
+
datetime(2014, 7, 7, 9, 30, 30): datetime(2014, 7, 4, 14, 30, 30),
|
| 771 |
+
},
|
| 772 |
+
),
|
| 773 |
+
(
|
| 774 |
+
BusinessHour(n=-1, start=["19:00", "03:00"], end=["01:00", "05:00"]),
|
| 775 |
+
{
|
| 776 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 4),
|
| 777 |
+
datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 4),
|
| 778 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 4),
|
| 779 |
+
datetime(2014, 7, 2, 13): datetime(2014, 7, 2, 4),
|
| 780 |
+
datetime(2014, 7, 2, 20): datetime(2014, 7, 2, 5),
|
| 781 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 4),
|
| 782 |
+
datetime(2014, 7, 2, 4): datetime(2014, 7, 2, 1),
|
| 783 |
+
datetime(2014, 7, 2, 19, 30): datetime(2014, 7, 2, 4, 30),
|
| 784 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 23),
|
| 785 |
+
datetime(2014, 7, 3, 6): datetime(2014, 7, 3, 4),
|
| 786 |
+
datetime(2014, 7, 4, 23): datetime(2014, 7, 4, 22),
|
| 787 |
+
datetime(2014, 7, 5, 0): datetime(2014, 7, 4, 23),
|
| 788 |
+
datetime(2014, 7, 5, 4): datetime(2014, 7, 5, 0),
|
| 789 |
+
datetime(2014, 7, 7, 3, 30): datetime(2014, 7, 5, 0, 30),
|
| 790 |
+
datetime(2014, 7, 7, 19, 30): datetime(2014, 7, 7, 4, 30),
|
| 791 |
+
datetime(2014, 7, 7, 19, 30, 30): datetime(2014, 7, 7, 4, 30, 30),
|
| 792 |
+
},
|
| 793 |
+
),
|
| 794 |
+
]
|
| 795 |
+
|
| 796 |
+
# long business hours (see gh-26381)
|
| 797 |
+
|
| 798 |
+
# multiple business hours
|
| 799 |
+
|
| 800 |
+
@pytest.mark.parametrize("case", apply_cases)
|
| 801 |
+
def test_apply(self, case):
|
| 802 |
+
offset, cases = case
|
| 803 |
+
for base, expected in cases.items():
|
| 804 |
+
assert_offset_equal(offset, base, expected)
|
| 805 |
+
|
| 806 |
+
apply_large_n_cases = [
|
| 807 |
+
(
|
| 808 |
+
# A week later
|
| 809 |
+
BusinessHour(40),
|
| 810 |
+
{
|
| 811 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 8, 11),
|
| 812 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 8, 13),
|
| 813 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 8, 15),
|
| 814 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 8, 16),
|
| 815 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 9, 9),
|
| 816 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 9, 11),
|
| 817 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 9, 9),
|
| 818 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 10, 9),
|
| 819 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 10, 9),
|
| 820 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 10, 9),
|
| 821 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 14, 9),
|
| 822 |
+
datetime(2014, 7, 4, 18): datetime(2014, 7, 14, 9),
|
| 823 |
+
datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 14, 9, 30),
|
| 824 |
+
datetime(2014, 7, 7, 9, 30, 30): datetime(2014, 7, 14, 9, 30, 30),
|
| 825 |
+
},
|
| 826 |
+
),
|
| 827 |
+
(
|
| 828 |
+
# 3 days and 1 hour before
|
| 829 |
+
BusinessHour(-25),
|
| 830 |
+
{
|
| 831 |
+
datetime(2014, 7, 1, 11): datetime(2014, 6, 26, 10),
|
| 832 |
+
datetime(2014, 7, 1, 13): datetime(2014, 6, 26, 12),
|
| 833 |
+
datetime(2014, 7, 1, 9): datetime(2014, 6, 25, 16),
|
| 834 |
+
datetime(2014, 7, 1, 10): datetime(2014, 6, 25, 17),
|
| 835 |
+
datetime(2014, 7, 3, 11): datetime(2014, 6, 30, 10),
|
| 836 |
+
datetime(2014, 7, 3, 8): datetime(2014, 6, 27, 16),
|
| 837 |
+
datetime(2014, 7, 3, 19): datetime(2014, 6, 30, 16),
|
| 838 |
+
datetime(2014, 7, 3, 23): datetime(2014, 6, 30, 16),
|
| 839 |
+
datetime(2014, 7, 4, 9): datetime(2014, 6, 30, 16),
|
| 840 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 1, 16),
|
| 841 |
+
datetime(2014, 7, 6, 18): datetime(2014, 7, 1, 16),
|
| 842 |
+
datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 1, 16, 30),
|
| 843 |
+
datetime(2014, 7, 7, 10, 30, 30): datetime(2014, 7, 2, 9, 30, 30),
|
| 844 |
+
},
|
| 845 |
+
),
|
| 846 |
+
(
|
| 847 |
+
# 5 days and 3 hours later
|
| 848 |
+
BusinessHour(28, start="21:00", end="02:00"),
|
| 849 |
+
{
|
| 850 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 9, 0),
|
| 851 |
+
datetime(2014, 7, 1, 22): datetime(2014, 7, 9, 1),
|
| 852 |
+
datetime(2014, 7, 1, 23): datetime(2014, 7, 9, 21),
|
| 853 |
+
datetime(2014, 7, 2, 2): datetime(2014, 7, 10, 0),
|
| 854 |
+
datetime(2014, 7, 3, 21): datetime(2014, 7, 11, 0),
|
| 855 |
+
datetime(2014, 7, 4, 1): datetime(2014, 7, 11, 23),
|
| 856 |
+
datetime(2014, 7, 4, 2): datetime(2014, 7, 12, 0),
|
| 857 |
+
datetime(2014, 7, 4, 3): datetime(2014, 7, 12, 0),
|
| 858 |
+
datetime(2014, 7, 5, 1): datetime(2014, 7, 14, 23),
|
| 859 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 15, 0),
|
| 860 |
+
datetime(2014, 7, 6, 18): datetime(2014, 7, 15, 0),
|
| 861 |
+
datetime(2014, 7, 7, 1): datetime(2014, 7, 15, 0),
|
| 862 |
+
datetime(2014, 7, 7, 23, 30): datetime(2014, 7, 15, 21, 30),
|
| 863 |
+
},
|
| 864 |
+
),
|
| 865 |
+
(
|
| 866 |
+
# large n for multiple opening hours (3 days and 1 hour before)
|
| 867 |
+
BusinessHour(n=-25, start=["09:00", "14:00"], end=["12:00", "19:00"]),
|
| 868 |
+
{
|
| 869 |
+
datetime(2014, 7, 1, 11): datetime(2014, 6, 26, 10),
|
| 870 |
+
datetime(2014, 7, 1, 13): datetime(2014, 6, 26, 11),
|
| 871 |
+
datetime(2014, 7, 1, 9): datetime(2014, 6, 25, 18),
|
| 872 |
+
datetime(2014, 7, 1, 10): datetime(2014, 6, 25, 19),
|
| 873 |
+
datetime(2014, 7, 3, 11): datetime(2014, 6, 30, 10),
|
| 874 |
+
datetime(2014, 7, 3, 8): datetime(2014, 6, 27, 18),
|
| 875 |
+
datetime(2014, 7, 3, 19): datetime(2014, 6, 30, 18),
|
| 876 |
+
datetime(2014, 7, 3, 23): datetime(2014, 6, 30, 18),
|
| 877 |
+
datetime(2014, 7, 4, 9): datetime(2014, 6, 30, 18),
|
| 878 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 1, 18),
|
| 879 |
+
datetime(2014, 7, 6, 18): datetime(2014, 7, 1, 18),
|
| 880 |
+
datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 1, 18, 30),
|
| 881 |
+
datetime(2014, 7, 7, 10, 30, 30): datetime(2014, 7, 2, 9, 30, 30),
|
| 882 |
+
},
|
| 883 |
+
),
|
| 884 |
+
(
|
| 885 |
+
# 5 days and 3 hours later
|
| 886 |
+
BusinessHour(28, start=["21:00", "03:00"], end=["01:00", "04:00"]),
|
| 887 |
+
{
|
| 888 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 9, 0),
|
| 889 |
+
datetime(2014, 7, 1, 22): datetime(2014, 7, 9, 3),
|
| 890 |
+
datetime(2014, 7, 1, 23): datetime(2014, 7, 9, 21),
|
| 891 |
+
datetime(2014, 7, 2, 2): datetime(2014, 7, 9, 23),
|
| 892 |
+
datetime(2014, 7, 3, 21): datetime(2014, 7, 11, 0),
|
| 893 |
+
datetime(2014, 7, 4, 1): datetime(2014, 7, 11, 23),
|
| 894 |
+
datetime(2014, 7, 4, 2): datetime(2014, 7, 11, 23),
|
| 895 |
+
datetime(2014, 7, 4, 3): datetime(2014, 7, 11, 23),
|
| 896 |
+
datetime(2014, 7, 4, 21): datetime(2014, 7, 12, 0),
|
| 897 |
+
datetime(2014, 7, 5, 0): datetime(2014, 7, 14, 22),
|
| 898 |
+
datetime(2014, 7, 5, 1): datetime(2014, 7, 14, 23),
|
| 899 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 14, 23),
|
| 900 |
+
datetime(2014, 7, 6, 18): datetime(2014, 7, 14, 23),
|
| 901 |
+
datetime(2014, 7, 7, 1): datetime(2014, 7, 14, 23),
|
| 902 |
+
datetime(2014, 7, 7, 23, 30): datetime(2014, 7, 15, 21, 30),
|
| 903 |
+
},
|
| 904 |
+
),
|
| 905 |
+
]
|
| 906 |
+
|
| 907 |
+
@pytest.mark.parametrize("case", apply_large_n_cases)
|
| 908 |
+
def test_apply_large_n(self, case):
|
| 909 |
+
offset, cases = case
|
| 910 |
+
for base, expected in cases.items():
|
| 911 |
+
assert_offset_equal(offset, base, expected)
|
| 912 |
+
|
| 913 |
+
def test_apply_nanoseconds(self):
|
| 914 |
+
tests = [
|
| 915 |
+
(
|
| 916 |
+
BusinessHour(),
|
| 917 |
+
{
|
| 918 |
+
Timestamp("2014-07-04 15:00")
|
| 919 |
+
+ Nano(5): Timestamp("2014-07-04 16:00")
|
| 920 |
+
+ Nano(5),
|
| 921 |
+
Timestamp("2014-07-04 16:00")
|
| 922 |
+
+ Nano(5): Timestamp("2014-07-07 09:00")
|
| 923 |
+
+ Nano(5),
|
| 924 |
+
Timestamp("2014-07-04 16:00")
|
| 925 |
+
- Nano(5): Timestamp("2014-07-04 17:00")
|
| 926 |
+
- Nano(5),
|
| 927 |
+
},
|
| 928 |
+
),
|
| 929 |
+
(
|
| 930 |
+
BusinessHour(-1),
|
| 931 |
+
{
|
| 932 |
+
Timestamp("2014-07-04 15:00")
|
| 933 |
+
+ Nano(5): Timestamp("2014-07-04 14:00")
|
| 934 |
+
+ Nano(5),
|
| 935 |
+
Timestamp("2014-07-04 10:00")
|
| 936 |
+
+ Nano(5): Timestamp("2014-07-04 09:00")
|
| 937 |
+
+ Nano(5),
|
| 938 |
+
Timestamp("2014-07-04 10:00")
|
| 939 |
+
- Nano(5): Timestamp("2014-07-03 17:00")
|
| 940 |
+
- Nano(5),
|
| 941 |
+
},
|
| 942 |
+
),
|
| 943 |
+
]
|
| 944 |
+
|
| 945 |
+
for offset, cases in tests:
|
| 946 |
+
for base, expected in cases.items():
|
| 947 |
+
assert_offset_equal(offset, base, expected)
|
| 948 |
+
|
| 949 |
+
@pytest.mark.parametrize("td_unit", ["s", "ms", "us", "ns"])
|
| 950 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
| 951 |
+
def test_bday_ignores_timedeltas(self, unit, td_unit):
|
| 952 |
+
# GH#55608
|
| 953 |
+
idx = date_range("2010/02/01", "2010/02/10", freq="12h", unit=unit)
|
| 954 |
+
td = Timedelta(3, unit="h").as_unit(td_unit)
|
| 955 |
+
off = BDay(offset=td)
|
| 956 |
+
t1 = idx + off
|
| 957 |
+
|
| 958 |
+
exp_unit = tm.get_finest_unit(td.unit, idx.unit)
|
| 959 |
+
|
| 960 |
+
expected = DatetimeIndex(
|
| 961 |
+
[
|
| 962 |
+
"2010-02-02 03:00:00",
|
| 963 |
+
"2010-02-02 15:00:00",
|
| 964 |
+
"2010-02-03 03:00:00",
|
| 965 |
+
"2010-02-03 15:00:00",
|
| 966 |
+
"2010-02-04 03:00:00",
|
| 967 |
+
"2010-02-04 15:00:00",
|
| 968 |
+
"2010-02-05 03:00:00",
|
| 969 |
+
"2010-02-05 15:00:00",
|
| 970 |
+
"2010-02-08 03:00:00",
|
| 971 |
+
"2010-02-08 15:00:00",
|
| 972 |
+
"2010-02-08 03:00:00",
|
| 973 |
+
"2010-02-08 15:00:00",
|
| 974 |
+
"2010-02-08 03:00:00",
|
| 975 |
+
"2010-02-08 15:00:00",
|
| 976 |
+
"2010-02-09 03:00:00",
|
| 977 |
+
"2010-02-09 15:00:00",
|
| 978 |
+
"2010-02-10 03:00:00",
|
| 979 |
+
"2010-02-10 15:00:00",
|
| 980 |
+
"2010-02-11 03:00:00",
|
| 981 |
+
],
|
| 982 |
+
freq=None,
|
| 983 |
+
).as_unit(exp_unit)
|
| 984 |
+
tm.assert_index_equal(t1, expected)
|
| 985 |
+
|
| 986 |
+
# TODO(GH#55564): as_unit will be unnecessary
|
| 987 |
+
pointwise = DatetimeIndex([x + off for x in idx]).as_unit(exp_unit)
|
| 988 |
+
tm.assert_index_equal(pointwise, expected)
|
| 989 |
+
|
| 990 |
+
def test_add_bday_offset_nanos(self):
|
| 991 |
+
# GH#55608
|
| 992 |
+
idx = date_range("2010/02/01", "2010/02/10", freq="12h", unit="ns")
|
| 993 |
+
off = BDay(offset=Timedelta(3, unit="ns"))
|
| 994 |
+
|
| 995 |
+
result = idx + off
|
| 996 |
+
expected = DatetimeIndex([x + off for x in idx])
|
| 997 |
+
tm.assert_index_equal(result, expected)
|
| 998 |
+
|
| 999 |
+
|
| 1000 |
+
class TestOpeningTimes:
    """Tests for ``BusinessHour._next_opening_time`` / ``_prev_opening_time``.

    The opening time depends only on the sign of ``n`` and on ``start``;
    the magnitude of ``n`` and the ``end`` times never affect it.  Each case
    therefore pairs several equivalent offsets with a single expected mapping
    of ``base -> (next_opening, previous_opening)``.
    """

    opening_time_cases = [
        (
            [
                BusinessHour(),
                BusinessHour(n=2),
                BusinessHour(n=4),
                BusinessHour(end="10:00"),
                BusinessHour(n=2, end="4:00"),
                BusinessHour(n=4, end="15:00"),
            ],
            {
                datetime(2014, 7, 1, 11): (datetime(2014, 7, 2, 9), datetime(2014, 7, 1, 9)),
                datetime(2014, 7, 1, 18): (datetime(2014, 7, 2, 9), datetime(2014, 7, 1, 9)),
                datetime(2014, 7, 1, 23): (datetime(2014, 7, 2, 9), datetime(2014, 7, 1, 9)),
                datetime(2014, 7, 2, 8): (datetime(2014, 7, 2, 9), datetime(2014, 7, 1, 9)),
                # if timestamp is on opening time, next opening time is as it is
                datetime(2014, 7, 2, 9): (datetime(2014, 7, 2, 9), datetime(2014, 7, 2, 9)),
                datetime(2014, 7, 2, 10): (datetime(2014, 7, 3, 9), datetime(2014, 7, 2, 9)),
                # 2014-07-05 is saturday
                datetime(2014, 7, 5, 10): (datetime(2014, 7, 7, 9), datetime(2014, 7, 4, 9)),
                datetime(2014, 7, 4, 10): (datetime(2014, 7, 7, 9), datetime(2014, 7, 4, 9)),
                datetime(2014, 7, 4, 23): (datetime(2014, 7, 7, 9), datetime(2014, 7, 4, 9)),
                datetime(2014, 7, 6, 10): (datetime(2014, 7, 7, 9), datetime(2014, 7, 4, 9)),
                datetime(2014, 7, 7, 5): (datetime(2014, 7, 7, 9), datetime(2014, 7, 4, 9)),
                datetime(2014, 7, 7, 9, 1): (datetime(2014, 7, 8, 9), datetime(2014, 7, 7, 9)),
            },
        ),
        (
            [
                BusinessHour(start="11:15"),
                BusinessHour(n=2, start="11:15"),
                BusinessHour(n=3, start="11:15"),
                BusinessHour(start="11:15", end="10:00"),
                BusinessHour(n=2, start="11:15", end="4:00"),
                BusinessHour(n=3, start="11:15", end="15:00"),
            ],
            {
                datetime(2014, 7, 1, 11): (datetime(2014, 7, 1, 11, 15), datetime(2014, 6, 30, 11, 15)),
                datetime(2014, 7, 1, 18): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 11, 15)),
                datetime(2014, 7, 1, 23): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 11, 15)),
                datetime(2014, 7, 2, 8): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 11, 15)),
                datetime(2014, 7, 2, 9): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 11, 15)),
                datetime(2014, 7, 2, 10): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 11, 15)),
                datetime(2014, 7, 2, 11, 15): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 2, 11, 15)),
                datetime(2014, 7, 2, 11, 15, 1): (datetime(2014, 7, 3, 11, 15), datetime(2014, 7, 2, 11, 15)),
                datetime(2014, 7, 5, 10): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 11, 15)),
                datetime(2014, 7, 4, 10): (datetime(2014, 7, 4, 11, 15), datetime(2014, 7, 3, 11, 15)),
                datetime(2014, 7, 4, 23): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 11, 15)),
                datetime(2014, 7, 6, 10): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 11, 15)),
                datetime(2014, 7, 7, 5): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 11, 15)),
                datetime(2014, 7, 7, 9, 1): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 11, 15)),
            },
        ),
        (
            [
                BusinessHour(-1),
                BusinessHour(n=-2),
                BusinessHour(n=-4),
                BusinessHour(n=-1, end="10:00"),
                BusinessHour(n=-2, end="4:00"),
                BusinessHour(n=-4, end="15:00"),
            ],
            {
                datetime(2014, 7, 1, 11): (datetime(2014, 7, 1, 9), datetime(2014, 7, 2, 9)),
                datetime(2014, 7, 1, 18): (datetime(2014, 7, 1, 9), datetime(2014, 7, 2, 9)),
                datetime(2014, 7, 1, 23): (datetime(2014, 7, 1, 9), datetime(2014, 7, 2, 9)),
                datetime(2014, 7, 2, 8): (datetime(2014, 7, 1, 9), datetime(2014, 7, 2, 9)),
                datetime(2014, 7, 2, 9): (datetime(2014, 7, 2, 9), datetime(2014, 7, 2, 9)),
                datetime(2014, 7, 2, 10): (datetime(2014, 7, 2, 9), datetime(2014, 7, 3, 9)),
                datetime(2014, 7, 5, 10): (datetime(2014, 7, 4, 9), datetime(2014, 7, 7, 9)),
                datetime(2014, 7, 4, 10): (datetime(2014, 7, 4, 9), datetime(2014, 7, 7, 9)),
                datetime(2014, 7, 4, 23): (datetime(2014, 7, 4, 9), datetime(2014, 7, 7, 9)),
                datetime(2014, 7, 6, 10): (datetime(2014, 7, 4, 9), datetime(2014, 7, 7, 9)),
                datetime(2014, 7, 7, 5): (datetime(2014, 7, 4, 9), datetime(2014, 7, 7, 9)),
                datetime(2014, 7, 7, 9): (datetime(2014, 7, 7, 9), datetime(2014, 7, 7, 9)),
                datetime(2014, 7, 7, 9, 1): (datetime(2014, 7, 7, 9), datetime(2014, 7, 8, 9)),
            },
        ),
        (
            [
                BusinessHour(start="17:00", end="05:00"),
                BusinessHour(n=3, start="17:00", end="03:00"),
            ],
            {
                datetime(2014, 7, 1, 11): (datetime(2014, 7, 1, 17), datetime(2014, 6, 30, 17)),
                datetime(2014, 7, 1, 18): (datetime(2014, 7, 2, 17), datetime(2014, 7, 1, 17)),
                datetime(2014, 7, 1, 23): (datetime(2014, 7, 2, 17), datetime(2014, 7, 1, 17)),
                datetime(2014, 7, 2, 8): (datetime(2014, 7, 2, 17), datetime(2014, 7, 1, 17)),
                datetime(2014, 7, 2, 9): (datetime(2014, 7, 2, 17), datetime(2014, 7, 1, 17)),
                datetime(2014, 7, 4, 17): (datetime(2014, 7, 4, 17), datetime(2014, 7, 4, 17)),
                datetime(2014, 7, 5, 10): (datetime(2014, 7, 7, 17), datetime(2014, 7, 4, 17)),
                datetime(2014, 7, 4, 10): (datetime(2014, 7, 4, 17), datetime(2014, 7, 3, 17)),
                datetime(2014, 7, 4, 23): (datetime(2014, 7, 7, 17), datetime(2014, 7, 4, 17)),
                datetime(2014, 7, 6, 10): (datetime(2014, 7, 7, 17), datetime(2014, 7, 4, 17)),
                datetime(2014, 7, 7, 5): (datetime(2014, 7, 7, 17), datetime(2014, 7, 4, 17)),
                datetime(2014, 7, 7, 17, 1): (datetime(2014, 7, 8, 17), datetime(2014, 7, 7, 17)),
            },
        ),
        (
            [
                BusinessHour(-1, start="17:00", end="05:00"),
                BusinessHour(n=-2, start="17:00", end="03:00"),
            ],
            {
                datetime(2014, 7, 1, 11): (datetime(2014, 6, 30, 17), datetime(2014, 7, 1, 17)),
                datetime(2014, 7, 1, 18): (datetime(2014, 7, 1, 17), datetime(2014, 7, 2, 17)),
                datetime(2014, 7, 1, 23): (datetime(2014, 7, 1, 17), datetime(2014, 7, 2, 17)),
                datetime(2014, 7, 2, 8): (datetime(2014, 7, 1, 17), datetime(2014, 7, 2, 17)),
                datetime(2014, 7, 2, 9): (datetime(2014, 7, 1, 17), datetime(2014, 7, 2, 17)),
                datetime(2014, 7, 2, 16, 59): (datetime(2014, 7, 1, 17), datetime(2014, 7, 2, 17)),
                datetime(2014, 7, 5, 10): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 17)),
                datetime(2014, 7, 4, 10): (datetime(2014, 7, 3, 17), datetime(2014, 7, 4, 17)),
                datetime(2014, 7, 4, 23): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 17)),
                datetime(2014, 7, 6, 10): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 17)),
                datetime(2014, 7, 7, 5): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 17)),
                datetime(2014, 7, 7, 18): (datetime(2014, 7, 7, 17), datetime(2014, 7, 8, 17)),
            },
        ),
        (
            [
                BusinessHour(start=["11:15", "15:00"], end=["13:00", "20:00"]),
                BusinessHour(n=3, start=["11:15", "15:00"], end=["12:00", "20:00"]),
                BusinessHour(start=["11:15", "15:00"], end=["13:00", "17:00"]),
                BusinessHour(n=2, start=["11:15", "15:00"], end=["12:00", "03:00"]),
                BusinessHour(n=3, start=["11:15", "15:00"], end=["13:00", "16:00"]),
            ],
            {
                datetime(2014, 7, 1, 11): (datetime(2014, 7, 1, 11, 15), datetime(2014, 6, 30, 15)),
                datetime(2014, 7, 1, 18): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 15)),
                datetime(2014, 7, 1, 23): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 15)),
                datetime(2014, 7, 2, 8): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 15)),
                datetime(2014, 7, 2, 9): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 15)),
                datetime(2014, 7, 2, 10): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 1, 15)),
                datetime(2014, 7, 2, 11, 15): (datetime(2014, 7, 2, 11, 15), datetime(2014, 7, 2, 11, 15)),
                datetime(2014, 7, 2, 11, 15, 1): (datetime(2014, 7, 2, 15), datetime(2014, 7, 2, 11, 15)),
                datetime(2014, 7, 5, 10): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 15)),
                datetime(2014, 7, 4, 10): (datetime(2014, 7, 4, 11, 15), datetime(2014, 7, 3, 15)),
                datetime(2014, 7, 4, 23): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 15)),
                datetime(2014, 7, 6, 10): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 15)),
                datetime(2014, 7, 7, 5): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 15)),
                datetime(2014, 7, 7, 9, 1): (datetime(2014, 7, 7, 11, 15), datetime(2014, 7, 4, 15)),
                datetime(2014, 7, 7, 12): (datetime(2014, 7, 7, 15), datetime(2014, 7, 7, 11, 15)),
            },
        ),
        (
            [
                BusinessHour(n=-1, start=["17:00", "08:00"], end=["05:00", "10:00"]),
                BusinessHour(n=-2, start=["08:00", "17:00"], end=["10:00", "03:00"]),
            ],
            {
                datetime(2014, 7, 1, 11): (datetime(2014, 7, 1, 8), datetime(2014, 7, 1, 17)),
                datetime(2014, 7, 1, 18): (datetime(2014, 7, 1, 17), datetime(2014, 7, 2, 8)),
                datetime(2014, 7, 1, 23): (datetime(2014, 7, 1, 17), datetime(2014, 7, 2, 8)),
                datetime(2014, 7, 2, 8): (datetime(2014, 7, 2, 8), datetime(2014, 7, 2, 8)),
                datetime(2014, 7, 2, 9): (datetime(2014, 7, 2, 8), datetime(2014, 7, 2, 17)),
                datetime(2014, 7, 2, 16, 59): (datetime(2014, 7, 2, 8), datetime(2014, 7, 2, 17)),
                datetime(2014, 7, 5, 10): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 8)),
                datetime(2014, 7, 4, 10): (datetime(2014, 7, 4, 8), datetime(2014, 7, 4, 17)),
                datetime(2014, 7, 4, 23): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 8)),
                datetime(2014, 7, 6, 10): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 8)),
                datetime(2014, 7, 7, 5): (datetime(2014, 7, 4, 17), datetime(2014, 7, 7, 8)),
                datetime(2014, 7, 7, 18): (datetime(2014, 7, 7, 17), datetime(2014, 7, 8, 8)),
            },
        ),
    ]

    @pytest.mark.parametrize("case", opening_time_cases)
    def test_opening_time(self, case):
        """Every offset in a group reports the same next/previous opening."""
        equivalent_offsets, expectations = case
        for offset in equivalent_offsets:
            for base, (expected_next, expected_prev) in expectations.items():
                assert offset._next_opening_time(base) == expected_next
                assert offset._prev_opening_time(base) == expected_prev
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_month.py
ADDED
|
@@ -0,0 +1,217 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the following offsets:
|
| 3 |
+
- BMonthBegin
|
| 4 |
+
- BMonthEnd
|
| 5 |
+
"""
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
import pandas as pd
|
| 13 |
+
from pandas.tests.tseries.offsets.common import (
|
| 14 |
+
assert_is_on_offset,
|
| 15 |
+
assert_offset_equal,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
from pandas.tseries.offsets import (
|
| 19 |
+
BMonthBegin,
|
| 20 |
+
BMonthEnd,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@pytest.mark.parametrize("n", [-2, 1])
@pytest.mark.parametrize("cls", [BMonthBegin, BMonthEnd])
def test_apply_index(cls, n):
    """Vectorized (Index/Series) addition matches scalar addition element-wise."""
    offset = cls(n=n)
    rng = pd.date_range(start="1/1/2000", periods=100000, freq="min")
    ser = pd.Series(rng)

    shifted = rng + offset
    # frequency is not retained after shifting by a business-month offset
    assert shifted.freq is None
    assert shifted[0] == rng[0] + offset
    assert shifted[-1] == rng[-1] + offset

    shifted_ser = ser + offset
    # apply_index is only for indexes, not series, so no second variant here
    assert shifted_ser.iloc[0] == ser.iloc[0] + offset
    assert shifted_ser.iloc[-1] == ser.iloc[-1] + offset
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class TestBMonthBegin:
    """Tests for the BMonthBegin (business month begin) offset."""

    def test_offsets_compare_equal(self):
        # root cause of #456: two equal offsets must never compare unequal
        offset1 = BMonthBegin()
        offset2 = BMonthBegin()
        assert not offset1 != offset2

    # Each entry pairs an offset with a mapping of base -> expected result.
    # (Rewritten as a single literal instead of repeated list.append calls;
    # a duplicated ``datetime(2007, 1, 1)`` key in the BMonthBegin(-1) case
    # was removed — the second occurrence silently overwrote the first.)
    offset_cases = [
        (
            BMonthBegin(),
            {
                datetime(2008, 1, 1): datetime(2008, 2, 1),
                datetime(2008, 1, 31): datetime(2008, 2, 1),
                datetime(2006, 12, 29): datetime(2007, 1, 1),
                datetime(2006, 12, 31): datetime(2007, 1, 1),
                datetime(2006, 9, 1): datetime(2006, 10, 2),
                datetime(2007, 1, 1): datetime(2007, 2, 1),
                datetime(2006, 12, 1): datetime(2007, 1, 1),
            },
        ),
        (
            BMonthBegin(0),
            {
                datetime(2008, 1, 1): datetime(2008, 1, 1),
                datetime(2006, 10, 2): datetime(2006, 10, 2),
                datetime(2008, 1, 31): datetime(2008, 2, 1),
                datetime(2006, 12, 29): datetime(2007, 1, 1),
                datetime(2006, 12, 31): datetime(2007, 1, 1),
                datetime(2006, 9, 15): datetime(2006, 10, 2),
            },
        ),
        (
            BMonthBegin(2),
            {
                datetime(2008, 1, 1): datetime(2008, 3, 3),
                datetime(2008, 1, 15): datetime(2008, 3, 3),
                datetime(2006, 12, 29): datetime(2007, 2, 1),
                datetime(2006, 12, 31): datetime(2007, 2, 1),
                datetime(2007, 1, 1): datetime(2007, 3, 1),
                datetime(2006, 11, 1): datetime(2007, 1, 1),
            },
        ),
        (
            BMonthBegin(-1),
            {
                datetime(2007, 1, 1): datetime(2006, 12, 1),
                datetime(2008, 6, 30): datetime(2008, 6, 2),
                datetime(2008, 6, 1): datetime(2008, 5, 1),
                datetime(2008, 3, 10): datetime(2008, 3, 3),
                datetime(2008, 12, 31): datetime(2008, 12, 1),
                datetime(2006, 12, 29): datetime(2006, 12, 1),
                datetime(2006, 12, 30): datetime(2006, 12, 1),
            },
        ),
    ]

    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        """Applying the offset to each base yields the mapped expected value."""
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)

    on_offset_cases = [
        (BMonthBegin(), datetime(2007, 12, 31), False),
        (BMonthBegin(), datetime(2008, 1, 1), True),
        (BMonthBegin(), datetime(2001, 4, 2), True),
        (BMonthBegin(), datetime(2008, 3, 3), True),
    ]

    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        """is_on_offset is True exactly on business-month-begin dates."""
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class TestBMonthEnd:
    """Tests for the BMonthEnd (business month end) offset."""

    def test_normalize(self):
        # normalize=True must be equivalent to zeroing the time first
        dt = datetime(2007, 1, 1, 3)

        result = dt + BMonthEnd(normalize=True)
        expected = dt.replace(hour=0) + BMonthEnd()
        assert result == expected

    def test_offsets_compare_equal(self):
        # root cause of #456: two equal offsets must never compare unequal
        offset1 = BMonthEnd()
        offset2 = BMonthEnd()
        assert not offset1 != offset2

    # Each entry pairs an offset with a mapping of base -> expected result.
    # (Rewritten as a single literal instead of repeated list.append calls;
    # a duplicated ``datetime(2007, 1, 1)`` key in the BMonthEnd(-1) case
    # was removed — the second occurrence silently overwrote the first.)
    offset_cases = [
        (
            BMonthEnd(),
            {
                datetime(2008, 1, 1): datetime(2008, 1, 31),
                datetime(2008, 1, 31): datetime(2008, 2, 29),
                datetime(2006, 12, 29): datetime(2007, 1, 31),
                datetime(2006, 12, 31): datetime(2007, 1, 31),
                datetime(2007, 1, 1): datetime(2007, 1, 31),
                datetime(2006, 12, 1): datetime(2006, 12, 29),
            },
        ),
        (
            BMonthEnd(0),
            {
                datetime(2008, 1, 1): datetime(2008, 1, 31),
                datetime(2008, 1, 31): datetime(2008, 1, 31),
                datetime(2006, 12, 29): datetime(2006, 12, 29),
                datetime(2006, 12, 31): datetime(2007, 1, 31),
                datetime(2007, 1, 1): datetime(2007, 1, 31),
            },
        ),
        (
            BMonthEnd(2),
            {
                datetime(2008, 1, 1): datetime(2008, 2, 29),
                datetime(2008, 1, 31): datetime(2008, 3, 31),
                datetime(2006, 12, 29): datetime(2007, 2, 28),
                datetime(2006, 12, 31): datetime(2007, 2, 28),
                datetime(2007, 1, 1): datetime(2007, 2, 28),
                datetime(2006, 11, 1): datetime(2006, 12, 29),
            },
        ),
        (
            BMonthEnd(-1),
            {
                datetime(2007, 1, 1): datetime(2006, 12, 29),
                datetime(2008, 6, 30): datetime(2008, 5, 30),
                datetime(2008, 12, 31): datetime(2008, 11, 28),
                datetime(2006, 12, 29): datetime(2006, 11, 30),
                datetime(2006, 12, 30): datetime(2006, 12, 29),
            },
        ),
    ]

    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        """Applying the offset to each base yields the mapped expected value."""
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)

    on_offset_cases = [
        (BMonthEnd(), datetime(2007, 12, 31), True),
        (BMonthEnd(), datetime(2008, 1, 1), False),
    ]

    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        """is_on_offset is True exactly on business-month-end dates."""
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_quarter.py
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the following offsets:
|
| 3 |
+
- BQuarterBegin
|
| 4 |
+
- BQuarterEnd
|
| 5 |
+
"""
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
import pandas._testing as tm
|
| 13 |
+
from pandas.tests.tseries.offsets.common import (
|
| 14 |
+
assert_is_on_offset,
|
| 15 |
+
assert_offset_equal,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
from pandas.tseries.offsets import (
|
| 19 |
+
BQuarterBegin,
|
| 20 |
+
BQuarterEnd,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def test_quarterly_dont_normalize():
    """Adding a business-quarter offset must preserve the time-of-day."""
    stamp = datetime(2012, 3, 31, 5, 30)

    for klass in (BQuarterEnd, BQuarterBegin):
        shifted = stamp + klass()
        assert shifted.time() == stamp.time()
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@pytest.mark.parametrize("offset", [BQuarterBegin(), BQuarterEnd()])
def test_on_offset(offset):
    """is_on_offset agrees with the round-trip definition d == (d + off) - off."""
    candidates = [
        datetime(2016, month, day)
        for month in [10, 11, 12]
        for day in [1, 2, 3, 28, 29, 30, 31]
        if not (month == 11 and day == 31)
    ]
    for candidate in candidates:
        fast = offset.is_on_offset(candidate)
        # slow but unambiguous reference implementation
        slow = candidate == (candidate + offset) - offset
        assert fast == slow
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class TestBQuarterBegin:
    """Tests for the BQuarterBegin (first business day of the quarter) offset."""

    def test_repr(self):
        # repr includes the anchor month; the default startingMonth is 3.
        expected = "<BusinessQuarterBegin: startingMonth=3>"
        assert repr(BQuarterBegin()) == expected
        expected = "<BusinessQuarterBegin: startingMonth=3>"
        assert repr(BQuarterBegin(startingMonth=3)) == expected
        expected = "<BusinessQuarterBegin: startingMonth=1>"
        assert repr(BQuarterBegin(startingMonth=1)) == expected

    def test_is_anchored(self):
        # is_anchored is deprecated; calling it must emit a FutureWarning.
        msg = "BQuarterBegin.is_anchored is deprecated "

        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert BQuarterBegin(startingMonth=1).is_anchored()
            assert BQuarterBegin().is_anchored()
            # n != 1 offsets are never anchored.
            assert not BQuarterBegin(2, startingMonth=1).is_anchored()

    def test_offset_corner_case(self):
        # corner: n=-1 from just past a quarter start rolls back to that
        # quarter's first business day (2007-04-01 is a Sunday).
        offset = BQuarterBegin(n=-1, startingMonth=1)
        assert datetime(2007, 4, 3) + offset == datetime(2007, 4, 2)

    # (offset, {input datetime: expected datetime}) pairs for test_offset.
    offset_cases = []
    offset_cases.append(
        (
            BQuarterBegin(startingMonth=1),
            {
                datetime(2008, 1, 1): datetime(2008, 4, 1),
                datetime(2008, 1, 31): datetime(2008, 4, 1),
                datetime(2008, 2, 15): datetime(2008, 4, 1),
                datetime(2008, 2, 29): datetime(2008, 4, 1),
                datetime(2008, 3, 15): datetime(2008, 4, 1),
                datetime(2008, 3, 31): datetime(2008, 4, 1),
                datetime(2008, 4, 15): datetime(2008, 7, 1),
                datetime(2007, 3, 15): datetime(2007, 4, 2),
                datetime(2007, 2, 28): datetime(2007, 4, 2),
                datetime(2007, 1, 1): datetime(2007, 4, 2),
                datetime(2007, 4, 15): datetime(2007, 7, 2),
                datetime(2007, 7, 1): datetime(2007, 7, 2),
                datetime(2007, 4, 1): datetime(2007, 4, 2),
                datetime(2007, 4, 2): datetime(2007, 7, 2),
                datetime(2008, 4, 30): datetime(2008, 7, 1),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterBegin(startingMonth=2),
            {
                datetime(2008, 1, 1): datetime(2008, 2, 1),
                datetime(2008, 1, 31): datetime(2008, 2, 1),
                datetime(2008, 1, 15): datetime(2008, 2, 1),
                datetime(2008, 2, 29): datetime(2008, 5, 1),
                datetime(2008, 3, 15): datetime(2008, 5, 1),
                datetime(2008, 3, 31): datetime(2008, 5, 1),
                datetime(2008, 4, 15): datetime(2008, 5, 1),
                # 2008-11-01 is a Saturday, so the quarter begins Mon 11-03.
                datetime(2008, 8, 15): datetime(2008, 11, 3),
                datetime(2008, 9, 15): datetime(2008, 11, 3),
                datetime(2008, 11, 1): datetime(2008, 11, 3),
                datetime(2008, 4, 30): datetime(2008, 5, 1),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterBegin(startingMonth=1, n=0),
            {
                # n=0 rolls forward to the current anchor if not on it,
                # and is a no-op when already on the anchor.
                datetime(2008, 1, 1): datetime(2008, 1, 1),
                datetime(2007, 12, 31): datetime(2008, 1, 1),
                datetime(2008, 2, 15): datetime(2008, 4, 1),
                datetime(2008, 2, 29): datetime(2008, 4, 1),
                datetime(2008, 1, 15): datetime(2008, 4, 1),
                datetime(2008, 2, 27): datetime(2008, 4, 1),
                datetime(2008, 3, 15): datetime(2008, 4, 1),
                datetime(2007, 4, 1): datetime(2007, 4, 2),
                datetime(2007, 4, 2): datetime(2007, 4, 2),
                datetime(2007, 7, 1): datetime(2007, 7, 2),
                datetime(2007, 4, 15): datetime(2007, 7, 2),
                datetime(2007, 7, 2): datetime(2007, 7, 2),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterBegin(startingMonth=1, n=-1),
            {
                datetime(2008, 1, 1): datetime(2007, 10, 1),
                datetime(2008, 1, 31): datetime(2008, 1, 1),
                datetime(2008, 2, 15): datetime(2008, 1, 1),
                datetime(2008, 2, 29): datetime(2008, 1, 1),
                datetime(2008, 3, 15): datetime(2008, 1, 1),
                datetime(2008, 3, 31): datetime(2008, 1, 1),
                datetime(2008, 4, 15): datetime(2008, 4, 1),
                datetime(2007, 7, 3): datetime(2007, 7, 2),
                datetime(2007, 4, 3): datetime(2007, 4, 2),
                datetime(2007, 7, 2): datetime(2007, 4, 2),
                datetime(2008, 4, 1): datetime(2008, 1, 1),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterBegin(startingMonth=1, n=2),
            {
                datetime(2008, 1, 1): datetime(2008, 7, 1),
                datetime(2008, 1, 15): datetime(2008, 7, 1),
                datetime(2008, 2, 29): datetime(2008, 7, 1),
                datetime(2008, 3, 15): datetime(2008, 7, 1),
                datetime(2007, 3, 31): datetime(2007, 7, 2),
                datetime(2007, 4, 15): datetime(2007, 10, 1),
                datetime(2008, 4, 30): datetime(2008, 10, 1),
            },
        )
    )

    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        # Each case is (offset, {base: expected}); assert_offset_equal also
        # verifies the reflected (base + offset) form.
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
class TestBQuarterEnd:
    """Tests for the BQuarterEnd (last business day of the quarter) offset."""

    def test_repr(self):
        # repr includes the anchor month; the default startingMonth is 3.
        expected = "<BusinessQuarterEnd: startingMonth=3>"
        assert repr(BQuarterEnd()) == expected
        expected = "<BusinessQuarterEnd: startingMonth=3>"
        assert repr(BQuarterEnd(startingMonth=3)) == expected
        expected = "<BusinessQuarterEnd: startingMonth=1>"
        assert repr(BQuarterEnd(startingMonth=1)) == expected

    def test_is_anchored(self):
        # is_anchored is deprecated; calling it must emit a FutureWarning.
        msg = "BQuarterEnd.is_anchored is deprecated "

        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert BQuarterEnd(startingMonth=1).is_anchored()
            assert BQuarterEnd().is_anchored()
            # n != 1 offsets are never anchored.
            assert not BQuarterEnd(2, startingMonth=1).is_anchored()

    def test_offset_corner_case(self):
        # corner: 2010-01-31 is a Sunday; n=-1 rolls back to Fri 01-29,
        # the last business day of the January-anchored quarter.
        offset = BQuarterEnd(n=-1, startingMonth=1)
        assert datetime(2010, 1, 31) + offset == datetime(2010, 1, 29)

    # (offset, {input datetime: expected datetime}) pairs for test_offset.
    offset_cases = []
    offset_cases.append(
        (
            BQuarterEnd(startingMonth=1),
            {
                datetime(2008, 1, 1): datetime(2008, 1, 31),
                datetime(2008, 1, 31): datetime(2008, 4, 30),
                datetime(2008, 2, 15): datetime(2008, 4, 30),
                datetime(2008, 2, 29): datetime(2008, 4, 30),
                datetime(2008, 3, 15): datetime(2008, 4, 30),
                datetime(2008, 3, 31): datetime(2008, 4, 30),
                datetime(2008, 4, 15): datetime(2008, 4, 30),
                datetime(2008, 4, 30): datetime(2008, 7, 31),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterEnd(startingMonth=2),
            {
                datetime(2008, 1, 1): datetime(2008, 2, 29),
                datetime(2008, 1, 31): datetime(2008, 2, 29),
                datetime(2008, 2, 15): datetime(2008, 2, 29),
                # 2008-05-31 is a Saturday, so quarter end is Fri 05-30.
                datetime(2008, 2, 29): datetime(2008, 5, 30),
                datetime(2008, 3, 15): datetime(2008, 5, 30),
                datetime(2008, 3, 31): datetime(2008, 5, 30),
                datetime(2008, 4, 15): datetime(2008, 5, 30),
                datetime(2008, 4, 30): datetime(2008, 5, 30),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterEnd(startingMonth=1, n=0),
            {
                # n=0 rolls forward to the current anchor if not on it,
                # and is a no-op when already on the anchor.
                datetime(2008, 1, 1): datetime(2008, 1, 31),
                datetime(2008, 1, 31): datetime(2008, 1, 31),
                datetime(2008, 2, 15): datetime(2008, 4, 30),
                datetime(2008, 2, 29): datetime(2008, 4, 30),
                datetime(2008, 3, 15): datetime(2008, 4, 30),
                datetime(2008, 3, 31): datetime(2008, 4, 30),
                datetime(2008, 4, 15): datetime(2008, 4, 30),
                datetime(2008, 4, 30): datetime(2008, 4, 30),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterEnd(startingMonth=1, n=-1),
            {
                datetime(2008, 1, 1): datetime(2007, 10, 31),
                datetime(2008, 1, 31): datetime(2007, 10, 31),
                datetime(2008, 2, 15): datetime(2008, 1, 31),
                datetime(2008, 2, 29): datetime(2008, 1, 31),
                datetime(2008, 3, 15): datetime(2008, 1, 31),
                datetime(2008, 3, 31): datetime(2008, 1, 31),
                datetime(2008, 4, 15): datetime(2008, 1, 31),
                datetime(2008, 4, 30): datetime(2008, 1, 31),
            },
        )
    )

    offset_cases.append(
        (
            BQuarterEnd(startingMonth=1, n=2),
            {
                datetime(2008, 1, 31): datetime(2008, 7, 31),
                datetime(2008, 2, 15): datetime(2008, 7, 31),
                datetime(2008, 2, 29): datetime(2008, 7, 31),
                datetime(2008, 3, 15): datetime(2008, 7, 31),
                datetime(2008, 3, 31): datetime(2008, 7, 31),
                datetime(2008, 4, 15): datetime(2008, 7, 31),
                datetime(2008, 4, 30): datetime(2008, 10, 31),
            },
        )
    )

    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        # Each case is (offset, {base: expected}); assert_offset_equal also
        # verifies the reflected (base + offset) form.
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)

    # (offset, candidate date, expected is_on_offset result) triples.
    on_offset_cases = [
        (BQuarterEnd(1, startingMonth=1), datetime(2008, 1, 31), True),
        (BQuarterEnd(1, startingMonth=1), datetime(2007, 12, 31), False),
        (BQuarterEnd(1, startingMonth=1), datetime(2008, 2, 29), False),
        (BQuarterEnd(1, startingMonth=1), datetime(2007, 3, 30), False),
        (BQuarterEnd(1, startingMonth=1), datetime(2007, 3, 31), False),
        (BQuarterEnd(1, startingMonth=1), datetime(2008, 4, 30), True),
        (BQuarterEnd(1, startingMonth=1), datetime(2008, 5, 30), False),
        (BQuarterEnd(1, startingMonth=1), datetime(2007, 6, 29), False),
        (BQuarterEnd(1, startingMonth=1), datetime(2007, 6, 30), False),
        (BQuarterEnd(1, startingMonth=2), datetime(2008, 1, 31), False),
        (BQuarterEnd(1, startingMonth=2), datetime(2007, 12, 31), False),
        (BQuarterEnd(1, startingMonth=2), datetime(2008, 2, 29), True),
        (BQuarterEnd(1, startingMonth=2), datetime(2007, 3, 30), False),
        (BQuarterEnd(1, startingMonth=2), datetime(2007, 3, 31), False),
        (BQuarterEnd(1, startingMonth=2), datetime(2008, 4, 30), False),
        (BQuarterEnd(1, startingMonth=2), datetime(2008, 5, 30), True),
        (BQuarterEnd(1, startingMonth=2), datetime(2007, 6, 29), False),
        (BQuarterEnd(1, startingMonth=2), datetime(2007, 6, 30), False),
        (BQuarterEnd(1, startingMonth=3), datetime(2008, 1, 31), False),
        (BQuarterEnd(1, startingMonth=3), datetime(2007, 12, 31), True),
        (BQuarterEnd(1, startingMonth=3), datetime(2008, 2, 29), False),
        (BQuarterEnd(1, startingMonth=3), datetime(2007, 3, 30), True),
        (BQuarterEnd(1, startingMonth=3), datetime(2007, 3, 31), False),
        (BQuarterEnd(1, startingMonth=3), datetime(2008, 4, 30), False),
        (BQuarterEnd(1, startingMonth=3), datetime(2008, 5, 30), False),
        (BQuarterEnd(1, startingMonth=3), datetime(2007, 6, 29), True),
        (BQuarterEnd(1, startingMonth=3), datetime(2007, 6, 30), False),
    ]

    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_business_year.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the following offsets:
|
| 3 |
+
- BYearBegin
|
| 4 |
+
- BYearEnd
|
| 5 |
+
"""
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
from pandas.tests.tseries.offsets.common import (
|
| 13 |
+
assert_is_on_offset,
|
| 14 |
+
assert_offset_equal,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
from pandas.tseries.offsets import (
|
| 18 |
+
BYearBegin,
|
| 19 |
+
BYearEnd,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class TestBYearBegin:
    """Tests for the BYearBegin (first business day of the year) offset."""

    def test_misspecified(self):
        # month outside 1..12 must raise at construction time.
        msg = "Month must go from 1 to 12"
        with pytest.raises(ValueError, match=msg):
            BYearBegin(month=13)
        with pytest.raises(ValueError, match=msg):
            BYearEnd(month=13)

    # (offset, {input datetime: expected datetime}) pairs for test_offset.
    offset_cases = []
    offset_cases.append(
        (
            BYearBegin(),
            {
                datetime(2008, 1, 1): datetime(2009, 1, 1),
                datetime(2008, 6, 30): datetime(2009, 1, 1),
                datetime(2008, 12, 31): datetime(2009, 1, 1),
                # 2011-01-01 is a Saturday; the year begins Mon 01-03.
                datetime(2011, 1, 1): datetime(2011, 1, 3),
                datetime(2011, 1, 3): datetime(2012, 1, 2),
                datetime(2005, 12, 30): datetime(2006, 1, 2),
                datetime(2005, 12, 31): datetime(2006, 1, 2),
            },
        )
    )

    offset_cases.append(
        (
            BYearBegin(0),
            {
                # n=0: no-op when on the anchor, else roll forward.
                datetime(2008, 1, 1): datetime(2008, 1, 1),
                datetime(2008, 6, 30): datetime(2009, 1, 1),
                datetime(2008, 12, 31): datetime(2009, 1, 1),
                datetime(2005, 12, 30): datetime(2006, 1, 2),
                datetime(2005, 12, 31): datetime(2006, 1, 2),
            },
        )
    )

    offset_cases.append(
        (
            BYearBegin(-1),
            {
                datetime(2007, 1, 1): datetime(2006, 1, 2),
                datetime(2009, 1, 4): datetime(2009, 1, 1),
                datetime(2009, 1, 1): datetime(2008, 1, 1),
                datetime(2008, 6, 30): datetime(2008, 1, 1),
                datetime(2008, 12, 31): datetime(2008, 1, 1),
                datetime(2006, 12, 29): datetime(2006, 1, 2),
                datetime(2006, 12, 30): datetime(2006, 1, 2),
                datetime(2006, 1, 1): datetime(2005, 1, 3),
            },
        )
    )

    offset_cases.append(
        (
            BYearBegin(-2),
            {
                datetime(2007, 1, 1): datetime(2005, 1, 3),
                datetime(2007, 6, 30): datetime(2006, 1, 2),
                datetime(2008, 12, 31): datetime(2007, 1, 1),
            },
        )
    )

    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        # Each case is (offset, {base: expected}); assert_offset_equal also
        # verifies the reflected (base + offset) form.
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class TestBYearEnd:
|
| 95 |
+
offset_cases = []
|
| 96 |
+
offset_cases.append(
|
| 97 |
+
(
|
| 98 |
+
BYearEnd(),
|
| 99 |
+
{
|
| 100 |
+
datetime(2008, 1, 1): datetime(2008, 12, 31),
|
| 101 |
+
datetime(2008, 6, 30): datetime(2008, 12, 31),
|
| 102 |
+
datetime(2008, 12, 31): datetime(2009, 12, 31),
|
| 103 |
+
datetime(2005, 12, 30): datetime(2006, 12, 29),
|
| 104 |
+
datetime(2005, 12, 31): datetime(2006, 12, 29),
|
| 105 |
+
},
|
| 106 |
+
)
|
| 107 |
+
)
|
| 108 |
+
|
| 109 |
+
offset_cases.append(
|
| 110 |
+
(
|
| 111 |
+
BYearEnd(0),
|
| 112 |
+
{
|
| 113 |
+
datetime(2008, 1, 1): datetime(2008, 12, 31),
|
| 114 |
+
datetime(2008, 6, 30): datetime(2008, 12, 31),
|
| 115 |
+
datetime(2008, 12, 31): datetime(2008, 12, 31),
|
| 116 |
+
datetime(2005, 12, 31): datetime(2006, 12, 29),
|
| 117 |
+
},
|
| 118 |
+
)
|
| 119 |
+
)
|
| 120 |
+
|
| 121 |
+
offset_cases.append(
|
| 122 |
+
(
|
| 123 |
+
BYearEnd(-1),
|
| 124 |
+
{
|
| 125 |
+
datetime(2007, 1, 1): datetime(2006, 12, 29),
|
| 126 |
+
datetime(2008, 6, 30): datetime(2007, 12, 31),
|
| 127 |
+
datetime(2008, 12, 31): datetime(2007, 12, 31),
|
| 128 |
+
datetime(2006, 12, 29): datetime(2005, 12, 30),
|
| 129 |
+
datetime(2006, 12, 30): datetime(2006, 12, 29),
|
| 130 |
+
datetime(2007, 1, 1): datetime(2006, 12, 29),
|
| 131 |
+
},
|
| 132 |
+
)
|
| 133 |
+
)
|
| 134 |
+
|
| 135 |
+
offset_cases.append(
|
| 136 |
+
(
|
| 137 |
+
BYearEnd(-2),
|
| 138 |
+
{
|
| 139 |
+
datetime(2007, 1, 1): datetime(2005, 12, 30),
|
| 140 |
+
datetime(2008, 6, 30): datetime(2006, 12, 29),
|
| 141 |
+
datetime(2008, 12, 31): datetime(2006, 12, 29),
|
| 142 |
+
},
|
| 143 |
+
)
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 147 |
+
def test_offset(self, case):
|
| 148 |
+
offset, cases = case
|
| 149 |
+
for base, expected in cases.items():
|
| 150 |
+
assert_offset_equal(offset, base, expected)
|
| 151 |
+
|
| 152 |
+
on_offset_cases = [
|
| 153 |
+
(BYearEnd(), datetime(2007, 12, 31), True),
|
| 154 |
+
(BYearEnd(), datetime(2008, 1, 1), False),
|
| 155 |
+
(BYearEnd(), datetime(2006, 12, 31), False),
|
| 156 |
+
(BYearEnd(), datetime(2006, 12, 29), True),
|
| 157 |
+
]
|
| 158 |
+
|
| 159 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 160 |
+
def test_is_on_offset(self, case):
|
| 161 |
+
offset, dt, expected = case
|
| 162 |
+
assert_is_on_offset(offset, dt, expected)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
class TestBYearEndLagged:
    """Tests for BYearEnd anchored to a non-December month."""

    def test_bad_month_fail(self):
        # month outside 1..12 must raise at construction time.
        msg = "Month must go from 1 to 12"
        with pytest.raises(ValueError, match=msg):
            BYearEnd(month=13)
        with pytest.raises(ValueError, match=msg):
            BYearEnd(month=0)

    # (offset, {input datetime: expected datetime}) pairs for test_offset.
    offset_cases = []
    offset_cases.append(
        (
            BYearEnd(month=6),
            {
                datetime(2008, 1, 1): datetime(2008, 6, 30),
                datetime(2007, 6, 30): datetime(2008, 6, 30),
            },
        )
    )

    offset_cases.append(
        (
            BYearEnd(n=-1, month=6),
            {
                # 2007-06-30 is a Saturday; fiscal year end is Fri 06-29.
                datetime(2008, 1, 1): datetime(2007, 6, 29),
                datetime(2007, 6, 30): datetime(2007, 6, 29),
            },
        )
    )

    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        # Each case is (offset, {base: expected}); assert_offset_equal also
        # verifies the reflected (base + offset) form.
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)

    def test_roll(self):
        # rollforward/rollback move to the nearest anchor in each direction.
        offset = BYearEnd(month=6)
        date = datetime(2009, 11, 30)

        assert offset.rollforward(date) == datetime(2010, 6, 30)
        assert offset.rollback(date) == datetime(2009, 6, 30)

    # (offset, candidate date, expected is_on_offset result) triples.
    on_offset_cases = [
        (BYearEnd(month=2), datetime(2007, 2, 28), True),
        # 2007-06-30 is a Saturday, so it is not a business year end.
        (BYearEnd(month=6), datetime(2007, 6, 30), False),
    ]

    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_common.py
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
|
| 3 |
+
from dateutil.tz.tz import tzlocal
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
from pandas._libs.tslibs import (
|
| 7 |
+
OutOfBoundsDatetime,
|
| 8 |
+
Timestamp,
|
| 9 |
+
)
|
| 10 |
+
from pandas.compat import (
|
| 11 |
+
IS64,
|
| 12 |
+
is_platform_windows,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
from pandas.tseries.offsets import (
|
| 16 |
+
FY5253,
|
| 17 |
+
BDay,
|
| 18 |
+
BMonthBegin,
|
| 19 |
+
BMonthEnd,
|
| 20 |
+
BQuarterBegin,
|
| 21 |
+
BQuarterEnd,
|
| 22 |
+
BusinessHour,
|
| 23 |
+
BYearBegin,
|
| 24 |
+
BYearEnd,
|
| 25 |
+
CBMonthBegin,
|
| 26 |
+
CBMonthEnd,
|
| 27 |
+
CDay,
|
| 28 |
+
CustomBusinessHour,
|
| 29 |
+
DateOffset,
|
| 30 |
+
FY5253Quarter,
|
| 31 |
+
LastWeekOfMonth,
|
| 32 |
+
MonthBegin,
|
| 33 |
+
MonthEnd,
|
| 34 |
+
QuarterEnd,
|
| 35 |
+
SemiMonthBegin,
|
| 36 |
+
SemiMonthEnd,
|
| 37 |
+
Week,
|
| 38 |
+
WeekOfMonth,
|
| 39 |
+
YearBegin,
|
| 40 |
+
YearEnd,
|
| 41 |
+
)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def _get_offset(klass, value=1, normalize=False):
|
| 45 |
+
# create instance from offset class
|
| 46 |
+
if klass is FY5253:
|
| 47 |
+
klass = klass(
|
| 48 |
+
n=value,
|
| 49 |
+
startingMonth=1,
|
| 50 |
+
weekday=1,
|
| 51 |
+
variation="last",
|
| 52 |
+
normalize=normalize,
|
| 53 |
+
)
|
| 54 |
+
elif klass is FY5253Quarter:
|
| 55 |
+
klass = klass(
|
| 56 |
+
n=value,
|
| 57 |
+
startingMonth=1,
|
| 58 |
+
weekday=1,
|
| 59 |
+
qtr_with_extra_week=1,
|
| 60 |
+
variation="last",
|
| 61 |
+
normalize=normalize,
|
| 62 |
+
)
|
| 63 |
+
elif klass is LastWeekOfMonth:
|
| 64 |
+
klass = klass(n=value, weekday=5, normalize=normalize)
|
| 65 |
+
elif klass is WeekOfMonth:
|
| 66 |
+
klass = klass(n=value, week=1, weekday=5, normalize=normalize)
|
| 67 |
+
elif klass is Week:
|
| 68 |
+
klass = klass(n=value, weekday=5, normalize=normalize)
|
| 69 |
+
elif klass is DateOffset:
|
| 70 |
+
klass = klass(days=value, normalize=normalize)
|
| 71 |
+
else:
|
| 72 |
+
klass = klass(value, normalize=normalize)
|
| 73 |
+
return klass
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
# Parametrized fixture yielding every offset *class* under test; each test
# that takes ``_offset`` runs once per class.  The fixture returns the class
# itself (not an instance) -- instantiation is done via _get_offset.
@pytest.fixture(
    params=[
        BDay,
        BusinessHour,
        BMonthEnd,
        BMonthBegin,
        BQuarterEnd,
        BQuarterBegin,
        BYearEnd,
        BYearBegin,
        CDay,
        CustomBusinessHour,
        CBMonthEnd,
        CBMonthBegin,
        MonthEnd,
        MonthBegin,
        SemiMonthBegin,
        SemiMonthEnd,
        QuarterEnd,
        LastWeekOfMonth,
        WeekOfMonth,
        Week,
        YearBegin,
        YearEnd,
        FY5253,
        FY5253Quarter,
        DateOffset,
    ]
)
def _offset(request):
    return request.param
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
@pytest.fixture
def dt(_offset):
    """Return a reference Timestamp appropriate for the offset class.

    Custom-business-day style offsets anchor on 2008-01-01; the hourly
    business offsets need an intraday timestamp; everything else uses
    2008-01-02.
    """
    if _offset in (CBMonthBegin, CBMonthEnd, BDay):
        return Timestamp(2008, 1, 1)
    elif _offset in (CustomBusinessHour, BusinessHour):
        # BUG FIX: the original used ``_offset is (CustomBusinessHour,
        # BusinessHour)`` -- an identity comparison against a fresh tuple,
        # which is always False, so this branch was dead and the hourly
        # offsets never received an intraday timestamp.
        return Timestamp(2014, 7, 1, 10, 00)
    return Timestamp(2008, 1, 2)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def test_apply_out_of_range(request, tz_naive_fixture, _offset):
    """Adding a huge offset either raises OutOfBoundsDatetime or, when it
    succeeds, returns a datetime whose timezone matches the input's."""
    tz = tz_naive_fixture

    # try to create an out-of-bounds result timestamp; if we can't create
    # the offset skip
    try:
        if _offset in (BusinessHour, CustomBusinessHour):
            # Using 10000 in BusinessHour fails in tz check because of DST
            # difference
            offset = _get_offset(_offset, value=100000)
        else:
            offset = _get_offset(_offset, value=10000)

        result = Timestamp("20080101") + offset
        assert isinstance(result, datetime)
        assert result.tzinfo is None

        # Check tz is preserved
        t = Timestamp("20080101", tz=tz)
        result = t + offset
        assert isinstance(result, datetime)
        if tz is not None:
            assert t.tzinfo is not None

        if isinstance(tz, tzlocal) and not IS64 and _offset is not DateOffset:
            # If we hit OutOfBoundsDatetime on non-64 bit machines
            # we'll drop out of the try clause before the next test
            request.applymarker(
                pytest.mark.xfail(reason="OverflowError inside tzlocal past 2038")
            )
        elif (
            isinstance(tz, tzlocal)
            and is_platform_windows()
            and _offset in (QuarterEnd, BQuarterBegin, BQuarterEnd)
        ):
            request.applymarker(
                pytest.mark.xfail(reason="After GH#49737 t.tzinfo is None on CI")
            )
        # Compare via str() so equivalent-but-distinct tzinfo objects match.
        assert str(t.tzinfo) == str(result.tzinfo)

    except OutOfBoundsDatetime:
        # Expected outcome for offsets large enough to overflow Timestamp.
        pass
    except (ValueError, KeyError):
        # we are creating an invalid offset
        # so ignore
        pass
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def test_offsets_compare_equal(_offset):
|
| 167 |
+
# root cause of GH#456: __ne__ was not implemented
|
| 168 |
+
offset1 = _offset()
|
| 169 |
+
offset2 = _offset()
|
| 170 |
+
assert not offset1 != offset2
|
| 171 |
+
assert offset1 == offset2
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
# Reflected subtraction: date - offset must equal applying the negated
# offset to the date.
@pytest.mark.parametrize(
    "date, offset2",
    [
        [Timestamp(2008, 1, 1), BDay(2)],
        [Timestamp(2014, 7, 1, 10, 00), BusinessHour(n=3)],
        [
            Timestamp(2014, 7, 1, 10),
            CustomBusinessHour(
                holidays=["2014-06-27", Timestamp(2014, 6, 30), Timestamp("2014-07-02")]
            ),
        ],
        [Timestamp(2008, 1, 2), SemiMonthEnd(2)],
        [Timestamp(2008, 1, 2), SemiMonthBegin(2)],
        [Timestamp(2008, 1, 2), Week(2)],
        [Timestamp(2008, 1, 2), WeekOfMonth(2)],
        [Timestamp(2008, 1, 2), LastWeekOfMonth(2)],
    ],
)
def test_rsub(date, offset2):
    assert date - offset2 == (-offset2)._apply(date)
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
# Reflected addition: offset + date must equal date + offset.
@pytest.mark.parametrize(
    "date, offset2",
    [
        [Timestamp(2008, 1, 1), BDay(2)],
        [Timestamp(2014, 7, 1, 10, 00), BusinessHour(n=3)],
        [
            Timestamp(2014, 7, 1, 10),
            CustomBusinessHour(
                holidays=["2014-06-27", Timestamp(2014, 6, 30), Timestamp("2014-07-02")]
            ),
        ],
        [Timestamp(2008, 1, 2), SemiMonthEnd(2)],
        [Timestamp(2008, 1, 2), SemiMonthBegin(2)],
        [Timestamp(2008, 1, 2), Week(2)],
        [Timestamp(2008, 1, 2), WeekOfMonth(2)],
        [Timestamp(2008, 1, 2), LastWeekOfMonth(2)],
    ],
)
def test_radd(date, offset2):
    assert date + offset2 == offset2 + date
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
@pytest.mark.parametrize(
    "date, offset_box, offset2",
    [
        [Timestamp(2008, 1, 1), BDay, BDay(2)],
        [Timestamp(2008, 1, 2), SemiMonthEnd, SemiMonthEnd(2)],
        [Timestamp(2008, 1, 2), SemiMonthBegin, SemiMonthBegin(2)],
        [Timestamp(2008, 1, 2), Week, Week(2)],
        [Timestamp(2008, 1, 2), WeekOfMonth, WeekOfMonth(2)],
        [Timestamp(2008, 1, 2), LastWeekOfMonth, LastWeekOfMonth(2)],
    ],
)
def test_sub(date, offset_box, offset2):
    """Subtraction semantics: offset - datetime is forbidden, offset
    arithmetic is consistent, and date - offset matches the negated form."""
    offset = offset2
    with pytest.raises(TypeError, match="Cannot subtract datetime from offset"):
        offset - date

    # 2*off - off collapses back to off, and subtracting an offset from a
    # date equals adding its negation.
    collapsed = 2 * offset - offset
    assert collapsed == offset
    assert date - offset2 == date + offset_box(-2)
    assert date - offset2 == date - collapsed
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
@pytest.mark.parametrize(
    "offset_box, offset1",
    [
        [BDay, BDay()],
        [LastWeekOfMonth, LastWeekOfMonth()],
        [WeekOfMonth, WeekOfMonth()],
        [Week, Week()],
        [SemiMonthBegin, SemiMonthBegin()],
        [SemiMonthEnd, SemiMonthEnd()],
        [CustomBusinessHour, CustomBusinessHour(weekmask="Tue Wed Thu Fri")],
        [BusinessHour, BusinessHour()],
    ],
)
def test_Mult1(offset_box, offset1):
    """Integer-scaling an offset equals constructing it with that n."""
    anchor = Timestamp(2008, 1, 2)
    for factor in (10, 5):
        assert anchor + factor * offset1 == anchor + offset_box(factor)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def test_compare_str(_offset):
    """GH#23524: comparing an offset to a non-castable string must not raise.

    Both ``__eq__`` and ``__ne__`` should quietly report inequality.
    """
    off = _get_offset(_offset)

    assert (off == "infer") is False
    assert (off != "foo") is True
    # Note: inequalities are only implemented for Tick subclasses;
    # tests for this are in test_ticks
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_custom_business_day.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for offsets.CustomBusinessDay / CDay
|
| 3 |
+
"""
|
| 4 |
+
from datetime import (
|
| 5 |
+
datetime,
|
| 6 |
+
timedelta,
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
import numpy as np
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
from pandas._libs.tslibs.offsets import CDay
|
| 13 |
+
|
| 14 |
+
from pandas import (
|
| 15 |
+
_testing as tm,
|
| 16 |
+
read_pickle,
|
| 17 |
+
)
|
| 18 |
+
from pandas.tests.tseries.offsets.common import assert_offset_equal
|
| 19 |
+
|
| 20 |
+
from pandas.tseries.holiday import USFederalHolidayCalendar
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@pytest.fixture
def offset():
    # Default CustomBusinessDay: one business day, Mon-Fri, no holidays.
    return CDay()
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
@pytest.fixture
def offset2():
    # Two custom business days, used for repr/arithmetic checks.
    return CDay(2)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class TestCustomBusinessDay:
|
| 34 |
+
def test_repr(self, offset, offset2):
|
| 35 |
+
assert repr(offset) == "<CustomBusinessDay>"
|
| 36 |
+
assert repr(offset2) == "<2 * CustomBusinessDays>"
|
| 37 |
+
|
| 38 |
+
expected = "<BusinessDay: offset=datetime.timedelta(days=1)>"
|
| 39 |
+
assert repr(offset + timedelta(1)) == expected
|
| 40 |
+
|
| 41 |
+
def test_holidays(self):
|
| 42 |
+
# Define a TradingDay offset
|
| 43 |
+
holidays = ["2012-05-01", datetime(2013, 5, 1), np.datetime64("2014-05-01")]
|
| 44 |
+
tday = CDay(holidays=holidays)
|
| 45 |
+
for year in range(2012, 2015):
|
| 46 |
+
dt = datetime(year, 4, 30)
|
| 47 |
+
xp = datetime(year, 5, 2)
|
| 48 |
+
rs = dt + tday
|
| 49 |
+
assert rs == xp
|
| 50 |
+
|
| 51 |
+
def test_weekmask(self):
|
| 52 |
+
weekmask_saudi = "Sat Sun Mon Tue Wed" # Thu-Fri Weekend
|
| 53 |
+
weekmask_uae = "1111001" # Fri-Sat Weekend
|
| 54 |
+
weekmask_egypt = [1, 1, 1, 1, 0, 0, 1] # Fri-Sat Weekend
|
| 55 |
+
bday_saudi = CDay(weekmask=weekmask_saudi)
|
| 56 |
+
bday_uae = CDay(weekmask=weekmask_uae)
|
| 57 |
+
bday_egypt = CDay(weekmask=weekmask_egypt)
|
| 58 |
+
dt = datetime(2013, 5, 1)
|
| 59 |
+
xp_saudi = datetime(2013, 5, 4)
|
| 60 |
+
xp_uae = datetime(2013, 5, 2)
|
| 61 |
+
xp_egypt = datetime(2013, 5, 2)
|
| 62 |
+
assert xp_saudi == dt + bday_saudi
|
| 63 |
+
assert xp_uae == dt + bday_uae
|
| 64 |
+
assert xp_egypt == dt + bday_egypt
|
| 65 |
+
xp2 = datetime(2013, 5, 5)
|
| 66 |
+
assert xp2 == dt + 2 * bday_saudi
|
| 67 |
+
assert xp2 == dt + 2 * bday_uae
|
| 68 |
+
assert xp2 == dt + 2 * bday_egypt
|
| 69 |
+
|
| 70 |
+
def test_weekmask_and_holidays(self):
|
| 71 |
+
weekmask_egypt = "Sun Mon Tue Wed Thu" # Fri-Sat Weekend
|
| 72 |
+
holidays = ["2012-05-01", datetime(2013, 5, 1), np.datetime64("2014-05-01")]
|
| 73 |
+
bday_egypt = CDay(holidays=holidays, weekmask=weekmask_egypt)
|
| 74 |
+
dt = datetime(2013, 4, 30)
|
| 75 |
+
xp_egypt = datetime(2013, 5, 5)
|
| 76 |
+
assert xp_egypt == dt + 2 * bday_egypt
|
| 77 |
+
|
| 78 |
+
@pytest.mark.filterwarnings("ignore:Non:pandas.errors.PerformanceWarning")
|
| 79 |
+
def test_calendar(self):
|
| 80 |
+
calendar = USFederalHolidayCalendar()
|
| 81 |
+
dt = datetime(2014, 1, 17)
|
| 82 |
+
assert_offset_equal(CDay(calendar=calendar), dt, datetime(2014, 1, 21))
|
| 83 |
+
|
| 84 |
+
def test_roundtrip_pickle(self, offset, offset2):
|
| 85 |
+
def _check_roundtrip(obj):
|
| 86 |
+
unpickled = tm.round_trip_pickle(obj)
|
| 87 |
+
assert unpickled == obj
|
| 88 |
+
|
| 89 |
+
_check_roundtrip(offset)
|
| 90 |
+
_check_roundtrip(offset2)
|
| 91 |
+
_check_roundtrip(offset * 2)
|
| 92 |
+
|
| 93 |
+
def test_pickle_compat_0_14_1(self, datapath):
|
| 94 |
+
hdays = [datetime(2013, 1, 1) for ele in range(4)]
|
| 95 |
+
pth = datapath("tseries", "offsets", "data", "cday-0.14.1.pickle")
|
| 96 |
+
cday0_14_1 = read_pickle(pth)
|
| 97 |
+
cday = CDay(holidays=hdays)
|
| 98 |
+
assert cday == cday0_14_1
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_custom_business_hour.py
ADDED
|
@@ -0,0 +1,329 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for offsets.CustomBusinessHour
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
from datetime import (
|
| 7 |
+
datetime,
|
| 8 |
+
time as dt_time,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
import numpy as np
|
| 12 |
+
import pytest
|
| 13 |
+
|
| 14 |
+
from pandas._libs.tslibs import Timestamp
|
| 15 |
+
from pandas._libs.tslibs.offsets import (
|
| 16 |
+
BusinessHour,
|
| 17 |
+
CustomBusinessHour,
|
| 18 |
+
Nano,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
from pandas.tests.tseries.offsets.common import assert_offset_equal
|
| 22 |
+
|
| 23 |
+
from pandas.tseries.holiday import USFederalHolidayCalendar
|
| 24 |
+
|
| 25 |
+
holidays = ["2014-06-27", datetime(2014, 6, 30), np.datetime64("2014-07-02")]
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
@pytest.fixture
|
| 29 |
+
def dt():
|
| 30 |
+
return datetime(2014, 7, 1, 10, 00)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.fixture
|
| 34 |
+
def _offset():
|
| 35 |
+
return CustomBusinessHour
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# 2014 Calendar to check custom holidays
|
| 39 |
+
# Sun Mon Tue Wed Thu Fri Sat
|
| 40 |
+
# 6/22 23 24 25 26 27 28
|
| 41 |
+
# 29 30 7/1 2 3 4 5
|
| 42 |
+
# 6 7 8 9 10 11 12
|
| 43 |
+
@pytest.fixture
|
| 44 |
+
def offset1():
|
| 45 |
+
return CustomBusinessHour(weekmask="Tue Wed Thu Fri")
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@pytest.fixture
|
| 49 |
+
def offset2():
|
| 50 |
+
return CustomBusinessHour(holidays=holidays)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class TestCustomBusinessHour:
|
| 54 |
+
def test_constructor_errors(self):
|
| 55 |
+
msg = "time data must be specified only with hour and minute"
|
| 56 |
+
with pytest.raises(ValueError, match=msg):
|
| 57 |
+
CustomBusinessHour(start=dt_time(11, 0, 5))
|
| 58 |
+
msg = "time data must match '%H:%M' format"
|
| 59 |
+
with pytest.raises(ValueError, match=msg):
|
| 60 |
+
CustomBusinessHour(start="AAA")
|
| 61 |
+
msg = "time data must match '%H:%M' format"
|
| 62 |
+
with pytest.raises(ValueError, match=msg):
|
| 63 |
+
CustomBusinessHour(start="14:00:05")
|
| 64 |
+
|
| 65 |
+
def test_different_normalize_equals(self, _offset):
|
| 66 |
+
# GH#21404 changed __eq__ to return False when `normalize` does not match
|
| 67 |
+
offset = _offset()
|
| 68 |
+
offset2 = _offset(normalize=True)
|
| 69 |
+
assert offset != offset2
|
| 70 |
+
|
| 71 |
+
def test_repr(self, offset1, offset2):
|
| 72 |
+
assert repr(offset1) == "<CustomBusinessHour: cbh=09:00-17:00>"
|
| 73 |
+
assert repr(offset2) == "<CustomBusinessHour: cbh=09:00-17:00>"
|
| 74 |
+
|
| 75 |
+
def test_with_offset(self, dt):
|
| 76 |
+
expected = Timestamp("2014-07-01 13:00")
|
| 77 |
+
|
| 78 |
+
assert dt + CustomBusinessHour() * 3 == expected
|
| 79 |
+
assert dt + CustomBusinessHour(n=3) == expected
|
| 80 |
+
|
| 81 |
+
def test_eq(self, offset1, offset2):
|
| 82 |
+
for offset in [offset1, offset2]:
|
| 83 |
+
assert offset == offset
|
| 84 |
+
|
| 85 |
+
assert CustomBusinessHour() != CustomBusinessHour(-1)
|
| 86 |
+
assert CustomBusinessHour(start="09:00") == CustomBusinessHour()
|
| 87 |
+
assert CustomBusinessHour(start="09:00") != CustomBusinessHour(start="09:01")
|
| 88 |
+
assert CustomBusinessHour(start="09:00", end="17:00") != CustomBusinessHour(
|
| 89 |
+
start="17:00", end="09:01"
|
| 90 |
+
)
|
| 91 |
+
|
| 92 |
+
assert CustomBusinessHour(weekmask="Tue Wed Thu Fri") != CustomBusinessHour(
|
| 93 |
+
weekmask="Mon Tue Wed Thu Fri"
|
| 94 |
+
)
|
| 95 |
+
assert CustomBusinessHour(holidays=["2014-06-27"]) != CustomBusinessHour(
|
| 96 |
+
holidays=["2014-06-28"]
|
| 97 |
+
)
|
| 98 |
+
|
| 99 |
+
def test_hash(self, offset1, offset2):
|
| 100 |
+
assert hash(offset1) == hash(offset1)
|
| 101 |
+
assert hash(offset2) == hash(offset2)
|
| 102 |
+
|
| 103 |
+
def test_add_dateime(self, dt, offset1, offset2):
|
| 104 |
+
assert offset1 + dt == datetime(2014, 7, 1, 11)
|
| 105 |
+
assert offset2 + dt == datetime(2014, 7, 1, 11)
|
| 106 |
+
|
| 107 |
+
def testRollback1(self, dt, offset1, offset2):
|
| 108 |
+
assert offset1.rollback(dt) == dt
|
| 109 |
+
assert offset2.rollback(dt) == dt
|
| 110 |
+
|
| 111 |
+
d = datetime(2014, 7, 1, 0)
|
| 112 |
+
|
| 113 |
+
# 2014/07/01 is Tuesday, 06/30 is Monday(holiday)
|
| 114 |
+
assert offset1.rollback(d) == datetime(2014, 6, 27, 17)
|
| 115 |
+
|
| 116 |
+
# 2014/6/30 and 2014/6/27 are holidays
|
| 117 |
+
assert offset2.rollback(d) == datetime(2014, 6, 26, 17)
|
| 118 |
+
|
| 119 |
+
def testRollback2(self, _offset):
|
| 120 |
+
assert _offset(-3).rollback(datetime(2014, 7, 5, 15, 0)) == datetime(
|
| 121 |
+
2014, 7, 4, 17, 0
|
| 122 |
+
)
|
| 123 |
+
|
| 124 |
+
def testRollforward1(self, dt, offset1, offset2):
|
| 125 |
+
assert offset1.rollforward(dt) == dt
|
| 126 |
+
assert offset2.rollforward(dt) == dt
|
| 127 |
+
|
| 128 |
+
d = datetime(2014, 7, 1, 0)
|
| 129 |
+
assert offset1.rollforward(d) == datetime(2014, 7, 1, 9)
|
| 130 |
+
assert offset2.rollforward(d) == datetime(2014, 7, 1, 9)
|
| 131 |
+
|
| 132 |
+
def testRollforward2(self, _offset):
|
| 133 |
+
assert _offset(-3).rollforward(datetime(2014, 7, 5, 16, 0)) == datetime(
|
| 134 |
+
2014, 7, 7, 9
|
| 135 |
+
)
|
| 136 |
+
|
| 137 |
+
def test_roll_date_object(self):
|
| 138 |
+
offset = BusinessHour()
|
| 139 |
+
|
| 140 |
+
dt = datetime(2014, 7, 6, 15, 0)
|
| 141 |
+
|
| 142 |
+
result = offset.rollback(dt)
|
| 143 |
+
assert result == datetime(2014, 7, 4, 17)
|
| 144 |
+
|
| 145 |
+
result = offset.rollforward(dt)
|
| 146 |
+
assert result == datetime(2014, 7, 7, 9)
|
| 147 |
+
|
| 148 |
+
normalize_cases = [
|
| 149 |
+
(
|
| 150 |
+
CustomBusinessHour(normalize=True, holidays=holidays),
|
| 151 |
+
{
|
| 152 |
+
datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
|
| 153 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 3),
|
| 154 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 3),
|
| 155 |
+
datetime(2014, 7, 1, 23): datetime(2014, 7, 3),
|
| 156 |
+
datetime(2014, 7, 1, 0): datetime(2014, 7, 1),
|
| 157 |
+
datetime(2014, 7, 4, 15): datetime(2014, 7, 4),
|
| 158 |
+
datetime(2014, 7, 4, 15, 59): datetime(2014, 7, 4),
|
| 159 |
+
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7),
|
| 160 |
+
datetime(2014, 7, 5, 23): datetime(2014, 7, 7),
|
| 161 |
+
datetime(2014, 7, 6, 10): datetime(2014, 7, 7),
|
| 162 |
+
},
|
| 163 |
+
),
|
| 164 |
+
(
|
| 165 |
+
CustomBusinessHour(-1, normalize=True, holidays=holidays),
|
| 166 |
+
{
|
| 167 |
+
datetime(2014, 7, 1, 8): datetime(2014, 6, 26),
|
| 168 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
|
| 169 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 1),
|
| 170 |
+
datetime(2014, 7, 1, 10): datetime(2014, 6, 26),
|
| 171 |
+
datetime(2014, 7, 1, 0): datetime(2014, 6, 26),
|
| 172 |
+
datetime(2014, 7, 7, 10): datetime(2014, 7, 4),
|
| 173 |
+
datetime(2014, 7, 7, 10, 1): datetime(2014, 7, 7),
|
| 174 |
+
datetime(2014, 7, 5, 23): datetime(2014, 7, 4),
|
| 175 |
+
datetime(2014, 7, 6, 10): datetime(2014, 7, 4),
|
| 176 |
+
},
|
| 177 |
+
),
|
| 178 |
+
(
|
| 179 |
+
CustomBusinessHour(
|
| 180 |
+
1, normalize=True, start="17:00", end="04:00", holidays=holidays
|
| 181 |
+
),
|
| 182 |
+
{
|
| 183 |
+
datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
|
| 184 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
|
| 185 |
+
datetime(2014, 7, 1, 23): datetime(2014, 7, 2),
|
| 186 |
+
datetime(2014, 7, 2, 2): datetime(2014, 7, 2),
|
| 187 |
+
datetime(2014, 7, 2, 3): datetime(2014, 7, 3),
|
| 188 |
+
datetime(2014, 7, 4, 23): datetime(2014, 7, 5),
|
| 189 |
+
datetime(2014, 7, 5, 2): datetime(2014, 7, 5),
|
| 190 |
+
datetime(2014, 7, 7, 2): datetime(2014, 7, 7),
|
| 191 |
+
datetime(2014, 7, 7, 17): datetime(2014, 7, 7),
|
| 192 |
+
},
|
| 193 |
+
),
|
| 194 |
+
]
|
| 195 |
+
|
| 196 |
+
@pytest.mark.parametrize("norm_cases", normalize_cases)
|
| 197 |
+
def test_normalize(self, norm_cases):
|
| 198 |
+
offset, cases = norm_cases
|
| 199 |
+
for dt, expected in cases.items():
|
| 200 |
+
assert offset._apply(dt) == expected
|
| 201 |
+
|
| 202 |
+
@pytest.mark.parametrize(
|
| 203 |
+
"dt, expected",
|
| 204 |
+
[
|
| 205 |
+
[datetime(2014, 7, 1, 9), False],
|
| 206 |
+
[datetime(2014, 7, 1, 10), True],
|
| 207 |
+
[datetime(2014, 7, 1, 15), True],
|
| 208 |
+
[datetime(2014, 7, 1, 15, 1), False],
|
| 209 |
+
[datetime(2014, 7, 5, 12), False],
|
| 210 |
+
[datetime(2014, 7, 6, 12), False],
|
| 211 |
+
],
|
| 212 |
+
)
|
| 213 |
+
def test_is_on_offset(self, dt, expected):
|
| 214 |
+
offset = CustomBusinessHour(start="10:00", end="15:00", holidays=holidays)
|
| 215 |
+
assert offset.is_on_offset(dt) == expected
|
| 216 |
+
|
| 217 |
+
apply_cases = [
|
| 218 |
+
(
|
| 219 |
+
CustomBusinessHour(holidays=holidays),
|
| 220 |
+
{
|
| 221 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 12),
|
| 222 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 1, 14),
|
| 223 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 1, 16),
|
| 224 |
+
datetime(2014, 7, 1, 19): datetime(2014, 7, 3, 10),
|
| 225 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 3, 9),
|
| 226 |
+
datetime(2014, 7, 1, 16, 30, 15): datetime(2014, 7, 3, 9, 30, 15),
|
| 227 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 3, 10),
|
| 228 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 3, 10),
|
| 229 |
+
# out of business hours
|
| 230 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 3, 10),
|
| 231 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 10),
|
| 232 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 10),
|
| 233 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 10),
|
| 234 |
+
# saturday
|
| 235 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 10),
|
| 236 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 10),
|
| 237 |
+
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 9, 30),
|
| 238 |
+
datetime(2014, 7, 4, 16, 30, 30): datetime(2014, 7, 7, 9, 30, 30),
|
| 239 |
+
},
|
| 240 |
+
),
|
| 241 |
+
(
|
| 242 |
+
CustomBusinessHour(4, holidays=holidays),
|
| 243 |
+
{
|
| 244 |
+
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 15),
|
| 245 |
+
datetime(2014, 7, 1, 13): datetime(2014, 7, 3, 9),
|
| 246 |
+
datetime(2014, 7, 1, 15): datetime(2014, 7, 3, 11),
|
| 247 |
+
datetime(2014, 7, 1, 16): datetime(2014, 7, 3, 12),
|
| 248 |
+
datetime(2014, 7, 1, 17): datetime(2014, 7, 3, 13),
|
| 249 |
+
datetime(2014, 7, 2, 11): datetime(2014, 7, 3, 13),
|
| 250 |
+
datetime(2014, 7, 2, 8): datetime(2014, 7, 3, 13),
|
| 251 |
+
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 13),
|
| 252 |
+
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 13),
|
| 253 |
+
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 13),
|
| 254 |
+
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 13),
|
| 255 |
+
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 13),
|
| 256 |
+
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 12, 30),
|
| 257 |
+
datetime(2014, 7, 4, 16, 30, 30): datetime(2014, 7, 7, 12, 30, 30),
|
| 258 |
+
},
|
| 259 |
+
),
|
| 260 |
+
]
|
| 261 |
+
|
| 262 |
+
@pytest.mark.parametrize("apply_case", apply_cases)
|
| 263 |
+
def test_apply(self, apply_case):
|
| 264 |
+
offset, cases = apply_case
|
| 265 |
+
for base, expected in cases.items():
|
| 266 |
+
assert_offset_equal(offset, base, expected)
|
| 267 |
+
|
| 268 |
+
nano_cases = [
|
| 269 |
+
(
|
| 270 |
+
CustomBusinessHour(holidays=holidays),
|
| 271 |
+
{
|
| 272 |
+
Timestamp("2014-07-01 15:00")
|
| 273 |
+
+ Nano(5): Timestamp("2014-07-01 16:00")
|
| 274 |
+
+ Nano(5),
|
| 275 |
+
Timestamp("2014-07-01 16:00")
|
| 276 |
+
+ Nano(5): Timestamp("2014-07-03 09:00")
|
| 277 |
+
+ Nano(5),
|
| 278 |
+
Timestamp("2014-07-01 16:00")
|
| 279 |
+
- Nano(5): Timestamp("2014-07-01 17:00")
|
| 280 |
+
- Nano(5),
|
| 281 |
+
},
|
| 282 |
+
),
|
| 283 |
+
(
|
| 284 |
+
CustomBusinessHour(-1, holidays=holidays),
|
| 285 |
+
{
|
| 286 |
+
Timestamp("2014-07-01 15:00")
|
| 287 |
+
+ Nano(5): Timestamp("2014-07-01 14:00")
|
| 288 |
+
+ Nano(5),
|
| 289 |
+
Timestamp("2014-07-01 10:00")
|
| 290 |
+
+ Nano(5): Timestamp("2014-07-01 09:00")
|
| 291 |
+
+ Nano(5),
|
| 292 |
+
Timestamp("2014-07-01 10:00")
|
| 293 |
+
- Nano(5): Timestamp("2014-06-26 17:00")
|
| 294 |
+
- Nano(5),
|
| 295 |
+
},
|
| 296 |
+
),
|
| 297 |
+
]
|
| 298 |
+
|
| 299 |
+
@pytest.mark.parametrize("nano_case", nano_cases)
|
| 300 |
+
def test_apply_nanoseconds(self, nano_case):
|
| 301 |
+
offset, cases = nano_case
|
| 302 |
+
for base, expected in cases.items():
|
| 303 |
+
assert_offset_equal(offset, base, expected)
|
| 304 |
+
|
| 305 |
+
def test_us_federal_holiday_with_datetime(self):
|
| 306 |
+
# GH 16867
|
| 307 |
+
bhour_us = CustomBusinessHour(calendar=USFederalHolidayCalendar())
|
| 308 |
+
t0 = datetime(2014, 1, 17, 15)
|
| 309 |
+
result = t0 + bhour_us * 8
|
| 310 |
+
expected = Timestamp("2014-01-21 15:00:00")
|
| 311 |
+
assert result == expected
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
@pytest.mark.parametrize(
|
| 315 |
+
"weekmask, expected_time, mult",
|
| 316 |
+
[
|
| 317 |
+
["Mon Tue Wed Thu Fri Sat", "2018-11-10 09:00:00", 10],
|
| 318 |
+
["Tue Wed Thu Fri Sat", "2018-11-13 08:00:00", 18],
|
| 319 |
+
],
|
| 320 |
+
)
|
| 321 |
+
def test_custom_businesshour_weekmask_and_holidays(weekmask, expected_time, mult):
|
| 322 |
+
# GH 23542
|
| 323 |
+
holidays = ["2018-11-09"]
|
| 324 |
+
bh = CustomBusinessHour(
|
| 325 |
+
start="08:00", end="17:00", weekmask=weekmask, holidays=holidays
|
| 326 |
+
)
|
| 327 |
+
result = Timestamp("2018-11-08 08:00") + mult * bh
|
| 328 |
+
expected = Timestamp(expected_time)
|
| 329 |
+
assert result == expected
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_custom_business_month.py
ADDED
|
@@ -0,0 +1,437 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the following offsets:
|
| 3 |
+
- CustomBusinessMonthBase
|
| 4 |
+
- CustomBusinessMonthBegin
|
| 5 |
+
- CustomBusinessMonthEnd
|
| 6 |
+
"""
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
from datetime import (
|
| 10 |
+
date,
|
| 11 |
+
datetime,
|
| 12 |
+
timedelta,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
import numpy as np
|
| 16 |
+
import pytest
|
| 17 |
+
|
| 18 |
+
from pandas._libs.tslibs.offsets import (
|
| 19 |
+
CBMonthBegin,
|
| 20 |
+
CBMonthEnd,
|
| 21 |
+
CDay,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
import pandas._testing as tm
|
| 25 |
+
from pandas.tests.tseries.offsets.common import (
|
| 26 |
+
assert_is_on_offset,
|
| 27 |
+
assert_offset_equal,
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
from pandas.tseries import offsets
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.fixture
|
| 34 |
+
def dt():
|
| 35 |
+
return datetime(2008, 1, 1)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class TestCommonCBM:
|
| 39 |
+
@pytest.mark.parametrize("offset2", [CBMonthBegin(2), CBMonthEnd(2)])
|
| 40 |
+
def test_eq(self, offset2):
|
| 41 |
+
assert offset2 == offset2
|
| 42 |
+
|
| 43 |
+
@pytest.mark.parametrize("offset2", [CBMonthBegin(2), CBMonthEnd(2)])
|
| 44 |
+
def test_hash(self, offset2):
|
| 45 |
+
assert hash(offset2) == hash(offset2)
|
| 46 |
+
|
| 47 |
+
@pytest.mark.parametrize("_offset", [CBMonthBegin, CBMonthEnd])
|
| 48 |
+
def test_roundtrip_pickle(self, _offset):
|
| 49 |
+
def _check_roundtrip(obj):
|
| 50 |
+
unpickled = tm.round_trip_pickle(obj)
|
| 51 |
+
assert unpickled == obj
|
| 52 |
+
|
| 53 |
+
_check_roundtrip(_offset())
|
| 54 |
+
_check_roundtrip(_offset(2))
|
| 55 |
+
_check_roundtrip(_offset() * 2)
|
| 56 |
+
|
| 57 |
+
@pytest.mark.parametrize("_offset", [CBMonthBegin, CBMonthEnd])
|
| 58 |
+
def test_copy(self, _offset):
|
| 59 |
+
# GH 17452
|
| 60 |
+
off = _offset(weekmask="Mon Wed Fri")
|
| 61 |
+
assert off == off.copy()
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class TestCustomBusinessMonthBegin:
|
| 65 |
+
@pytest.fixture
|
| 66 |
+
def _offset(self):
|
| 67 |
+
return CBMonthBegin
|
| 68 |
+
|
| 69 |
+
@pytest.fixture
|
| 70 |
+
def offset(self):
|
| 71 |
+
return CBMonthBegin()
|
| 72 |
+
|
| 73 |
+
@pytest.fixture
|
| 74 |
+
def offset2(self):
|
| 75 |
+
return CBMonthBegin(2)
|
| 76 |
+
|
| 77 |
+
def test_different_normalize_equals(self, _offset):
|
| 78 |
+
# GH#21404 changed __eq__ to return False when `normalize` does not match
|
| 79 |
+
offset = _offset()
|
| 80 |
+
offset2 = _offset(normalize=True)
|
| 81 |
+
assert offset != offset2
|
| 82 |
+
|
| 83 |
+
def test_repr(self, offset, offset2):
|
| 84 |
+
assert repr(offset) == "<CustomBusinessMonthBegin>"
|
| 85 |
+
assert repr(offset2) == "<2 * CustomBusinessMonthBegins>"
|
| 86 |
+
|
| 87 |
+
def test_add_datetime(self, dt, offset2):
|
| 88 |
+
assert offset2 + dt == datetime(2008, 3, 3)
|
| 89 |
+
|
| 90 |
+
def testRollback1(self):
|
| 91 |
+
assert CDay(10).rollback(datetime(2007, 12, 31)) == datetime(2007, 12, 31)
|
| 92 |
+
|
| 93 |
+
def testRollback2(self, dt):
|
| 94 |
+
assert CBMonthBegin(10).rollback(dt) == datetime(2008, 1, 1)
|
| 95 |
+
|
| 96 |
+
def testRollforward1(self, dt):
|
| 97 |
+
assert CBMonthBegin(10).rollforward(dt) == datetime(2008, 1, 1)
|
| 98 |
+
|
| 99 |
+
def test_roll_date_object(self):
|
| 100 |
+
offset = CBMonthBegin()
|
| 101 |
+
|
| 102 |
+
dt = date(2012, 9, 15)
|
| 103 |
+
|
| 104 |
+
result = offset.rollback(dt)
|
| 105 |
+
assert result == datetime(2012, 9, 3)
|
| 106 |
+
|
| 107 |
+
result = offset.rollforward(dt)
|
| 108 |
+
assert result == datetime(2012, 10, 1)
|
| 109 |
+
|
| 110 |
+
offset = offsets.Day()
|
| 111 |
+
result = offset.rollback(dt)
|
| 112 |
+
assert result == datetime(2012, 9, 15)
|
| 113 |
+
|
| 114 |
+
result = offset.rollforward(dt)
|
| 115 |
+
assert result == datetime(2012, 9, 15)
|
| 116 |
+
|
| 117 |
+
on_offset_cases = [
|
| 118 |
+
(CBMonthBegin(), datetime(2008, 1, 1), True),
|
| 119 |
+
(CBMonthBegin(), datetime(2008, 1, 31), False),
|
| 120 |
+
]
|
| 121 |
+
|
| 122 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 123 |
+
def test_is_on_offset(self, case):
|
| 124 |
+
offset, dt, expected = case
|
| 125 |
+
assert_is_on_offset(offset, dt, expected)
|
| 126 |
+
|
| 127 |
+
apply_cases = [
|
| 128 |
+
(
|
| 129 |
+
CBMonthBegin(),
|
| 130 |
+
{
|
| 131 |
+
datetime(2008, 1, 1): datetime(2008, 2, 1),
|
| 132 |
+
datetime(2008, 2, 7): datetime(2008, 3, 3),
|
| 133 |
+
},
|
| 134 |
+
),
|
| 135 |
+
(
|
| 136 |
+
2 * CBMonthBegin(),
|
| 137 |
+
{
|
| 138 |
+
datetime(2008, 1, 1): datetime(2008, 3, 3),
|
| 139 |
+
datetime(2008, 2, 7): datetime(2008, 4, 1),
|
| 140 |
+
},
|
| 141 |
+
),
|
| 142 |
+
(
|
| 143 |
+
-CBMonthBegin(),
|
| 144 |
+
{
|
| 145 |
+
datetime(2008, 1, 1): datetime(2007, 12, 3),
|
| 146 |
+
datetime(2008, 2, 8): datetime(2008, 2, 1),
|
| 147 |
+
},
|
| 148 |
+
),
|
| 149 |
+
(
|
| 150 |
+
-2 * CBMonthBegin(),
|
| 151 |
+
{
|
| 152 |
+
datetime(2008, 1, 1): datetime(2007, 11, 1),
|
| 153 |
+
datetime(2008, 2, 9): datetime(2008, 1, 1),
|
| 154 |
+
},
|
| 155 |
+
),
|
| 156 |
+
(
|
| 157 |
+
CBMonthBegin(0),
|
| 158 |
+
{
|
| 159 |
+
datetime(2008, 1, 1): datetime(2008, 1, 1),
|
| 160 |
+
datetime(2008, 1, 7): datetime(2008, 2, 1),
|
| 161 |
+
},
|
| 162 |
+
),
|
| 163 |
+
]
|
| 164 |
+
|
| 165 |
+
@pytest.mark.parametrize("case", apply_cases)
|
| 166 |
+
def test_apply(self, case):
|
| 167 |
+
offset, cases = case
|
| 168 |
+
for base, expected in cases.items():
|
| 169 |
+
assert_offset_equal(offset, base, expected)
|
| 170 |
+
|
| 171 |
+
def test_apply_large_n(self):
|
| 172 |
+
dt = datetime(2012, 10, 23)
|
| 173 |
+
|
| 174 |
+
result = dt + CBMonthBegin(10)
|
| 175 |
+
assert result == datetime(2013, 8, 1)
|
| 176 |
+
|
| 177 |
+
result = dt + CDay(100) - CDay(100)
|
| 178 |
+
assert result == dt
|
| 179 |
+
|
| 180 |
+
off = CBMonthBegin() * 6
|
| 181 |
+
rs = datetime(2012, 1, 1) - off
|
| 182 |
+
xp = datetime(2011, 7, 1)
|
| 183 |
+
assert rs == xp
|
| 184 |
+
|
| 185 |
+
st = datetime(2011, 12, 18)
|
| 186 |
+
rs = st + off
|
| 187 |
+
|
| 188 |
+
xp = datetime(2012, 6, 1)
|
| 189 |
+
assert rs == xp
|
| 190 |
+
|
| 191 |
+
def test_holidays(self):
|
| 192 |
+
# Define a TradingDay offset
|
| 193 |
+
holidays = ["2012-02-01", datetime(2012, 2, 2), np.datetime64("2012-03-01")]
|
| 194 |
+
bm_offset = CBMonthBegin(holidays=holidays)
|
| 195 |
+
dt = datetime(2012, 1, 1)
|
| 196 |
+
|
| 197 |
+
assert dt + bm_offset == datetime(2012, 1, 2)
|
| 198 |
+
assert dt + 2 * bm_offset == datetime(2012, 2, 3)
|
| 199 |
+
|
| 200 |
+
@pytest.mark.parametrize(
|
| 201 |
+
"case",
|
| 202 |
+
[
|
| 203 |
+
(
|
| 204 |
+
CBMonthBegin(n=1, offset=timedelta(days=5)),
|
| 205 |
+
{
|
| 206 |
+
datetime(2021, 3, 1): datetime(2021, 4, 1) + timedelta(days=5),
|
| 207 |
+
datetime(2021, 4, 17): datetime(2021, 5, 3) + timedelta(days=5),
|
| 208 |
+
},
|
| 209 |
+
),
|
| 210 |
+
(
|
| 211 |
+
CBMonthBegin(n=2, offset=timedelta(days=40)),
|
| 212 |
+
{
|
| 213 |
+
datetime(2021, 3, 10): datetime(2021, 5, 3) + timedelta(days=40),
|
| 214 |
+
datetime(2021, 4, 30): datetime(2021, 6, 1) + timedelta(days=40),
|
| 215 |
+
},
|
| 216 |
+
),
|
| 217 |
+
(
|
| 218 |
+
CBMonthBegin(n=1, offset=timedelta(days=-5)),
|
| 219 |
+
{
|
| 220 |
+
datetime(2021, 3, 1): datetime(2021, 4, 1) - timedelta(days=5),
|
| 221 |
+
datetime(2021, 4, 11): datetime(2021, 5, 3) - timedelta(days=5),
|
| 222 |
+
},
|
| 223 |
+
),
|
| 224 |
+
(
|
| 225 |
+
-2 * CBMonthBegin(n=1, offset=timedelta(days=10)),
|
| 226 |
+
{
|
| 227 |
+
datetime(2021, 3, 1): datetime(2021, 1, 1) + timedelta(days=10),
|
| 228 |
+
datetime(2021, 4, 3): datetime(2021, 3, 1) + timedelta(days=10),
|
| 229 |
+
},
|
| 230 |
+
),
|
| 231 |
+
(
|
| 232 |
+
CBMonthBegin(n=0, offset=timedelta(days=1)),
|
| 233 |
+
{
|
| 234 |
+
datetime(2021, 3, 2): datetime(2021, 4, 1) + timedelta(days=1),
|
| 235 |
+
datetime(2021, 4, 1): datetime(2021, 4, 1) + timedelta(days=1),
|
| 236 |
+
},
|
| 237 |
+
),
|
| 238 |
+
(
|
| 239 |
+
CBMonthBegin(
|
| 240 |
+
n=1, holidays=["2021-04-01", "2021-04-02"], offset=timedelta(days=1)
|
| 241 |
+
),
|
| 242 |
+
{
|
| 243 |
+
datetime(2021, 3, 2): datetime(2021, 4, 5) + timedelta(days=1),
|
| 244 |
+
},
|
| 245 |
+
),
|
| 246 |
+
],
|
| 247 |
+
)
|
| 248 |
+
def test_apply_with_extra_offset(self, case):
|
| 249 |
+
offset, cases = case
|
| 250 |
+
for base, expected in cases.items():
|
| 251 |
+
assert_offset_equal(offset, base, expected)
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
class TestCustomBusinessMonthEnd:
|
| 255 |
+
@pytest.fixture
|
| 256 |
+
def _offset(self):
|
| 257 |
+
return CBMonthEnd
|
| 258 |
+
|
| 259 |
+
@pytest.fixture
|
| 260 |
+
def offset(self):
|
| 261 |
+
return CBMonthEnd()
|
| 262 |
+
|
| 263 |
+
@pytest.fixture
|
| 264 |
+
def offset2(self):
|
| 265 |
+
return CBMonthEnd(2)
|
| 266 |
+
|
| 267 |
+
def test_different_normalize_equals(self, _offset):
|
| 268 |
+
# GH#21404 changed __eq__ to return False when `normalize` does not match
|
| 269 |
+
offset = _offset()
|
| 270 |
+
offset2 = _offset(normalize=True)
|
| 271 |
+
assert offset != offset2
|
| 272 |
+
|
| 273 |
+
def test_repr(self, offset, offset2):
|
| 274 |
+
assert repr(offset) == "<CustomBusinessMonthEnd>"
|
| 275 |
+
assert repr(offset2) == "<2 * CustomBusinessMonthEnds>"
|
| 276 |
+
|
| 277 |
+
def test_add_datetime(self, dt, offset2):
|
| 278 |
+
assert offset2 + dt == datetime(2008, 2, 29)
|
| 279 |
+
|
| 280 |
+
def testRollback1(self):
|
| 281 |
+
assert CDay(10).rollback(datetime(2007, 12, 31)) == datetime(2007, 12, 31)
|
| 282 |
+
|
| 283 |
+
def testRollback2(self, dt):
|
| 284 |
+
assert CBMonthEnd(10).rollback(dt) == datetime(2007, 12, 31)
|
| 285 |
+
|
| 286 |
+
def testRollforward1(self, dt):
|
| 287 |
+
assert CBMonthEnd(10).rollforward(dt) == datetime(2008, 1, 31)
|
| 288 |
+
|
| 289 |
+
def test_roll_date_object(self):
|
| 290 |
+
offset = CBMonthEnd()
|
| 291 |
+
|
| 292 |
+
dt = date(2012, 9, 15)
|
| 293 |
+
|
| 294 |
+
result = offset.rollback(dt)
|
| 295 |
+
assert result == datetime(2012, 8, 31)
|
| 296 |
+
|
| 297 |
+
result = offset.rollforward(dt)
|
| 298 |
+
assert result == datetime(2012, 9, 28)
|
| 299 |
+
|
| 300 |
+
offset = offsets.Day()
|
| 301 |
+
result = offset.rollback(dt)
|
| 302 |
+
assert result == datetime(2012, 9, 15)
|
| 303 |
+
|
| 304 |
+
result = offset.rollforward(dt)
|
| 305 |
+
assert result == datetime(2012, 9, 15)
|
| 306 |
+
|
| 307 |
+
on_offset_cases = [
|
| 308 |
+
(CBMonthEnd(), datetime(2008, 1, 31), True),
|
| 309 |
+
(CBMonthEnd(), datetime(2008, 1, 1), False),
|
| 310 |
+
]
|
| 311 |
+
|
| 312 |
+
    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        # Each case is an (offset, datetime, expected_bool) triple.
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)
|
| 316 |
+
|
| 317 |
+
    # Each entry pairs an offset with a {base: expected} mapping for
    # ``offset + base``; covers positive, scaled, negated, and n=0 offsets.
    apply_cases = [
        (
            CBMonthEnd(),
            {
                datetime(2008, 1, 1): datetime(2008, 1, 31),
                datetime(2008, 2, 7): datetime(2008, 2, 29),
            },
        ),
        (
            2 * CBMonthEnd(),
            {
                datetime(2008, 1, 1): datetime(2008, 2, 29),
                datetime(2008, 2, 7): datetime(2008, 3, 31),
            },
        ),
        (
            -CBMonthEnd(),
            {
                datetime(2008, 1, 1): datetime(2007, 12, 31),
                datetime(2008, 2, 8): datetime(2008, 1, 31),
            },
        ),
        (
            -2 * CBMonthEnd(),
            {
                datetime(2008, 1, 1): datetime(2007, 11, 30),
                datetime(2008, 2, 9): datetime(2007, 12, 31),
            },
        ),
        (
            CBMonthEnd(0),
            {
                datetime(2008, 1, 1): datetime(2008, 1, 31),
                datetime(2008, 2, 7): datetime(2008, 2, 29),
            },
        ),
    ]
|
| 354 |
+
|
| 355 |
+
@pytest.mark.parametrize("case", apply_cases)
|
| 356 |
+
def test_apply(self, case):
|
| 357 |
+
offset, cases = case
|
| 358 |
+
for base, expected in cases.items():
|
| 359 |
+
assert_offset_equal(offset, base, expected)
|
| 360 |
+
|
| 361 |
+
def test_apply_large_n(self):
|
| 362 |
+
dt = datetime(2012, 10, 23)
|
| 363 |
+
|
| 364 |
+
result = dt + CBMonthEnd(10)
|
| 365 |
+
assert result == datetime(2013, 7, 31)
|
| 366 |
+
|
| 367 |
+
result = dt + CDay(100) - CDay(100)
|
| 368 |
+
assert result == dt
|
| 369 |
+
|
| 370 |
+
off = CBMonthEnd() * 6
|
| 371 |
+
rs = datetime(2012, 1, 1) - off
|
| 372 |
+
xp = datetime(2011, 7, 29)
|
| 373 |
+
assert rs == xp
|
| 374 |
+
|
| 375 |
+
st = datetime(2011, 12, 18)
|
| 376 |
+
rs = st + off
|
| 377 |
+
xp = datetime(2012, 5, 31)
|
| 378 |
+
assert rs == xp
|
| 379 |
+
|
| 380 |
+
def test_holidays(self):
|
| 381 |
+
# Define a TradingDay offset
|
| 382 |
+
holidays = ["2012-01-31", datetime(2012, 2, 28), np.datetime64("2012-02-29")]
|
| 383 |
+
bm_offset = CBMonthEnd(holidays=holidays)
|
| 384 |
+
dt = datetime(2012, 1, 1)
|
| 385 |
+
assert dt + bm_offset == datetime(2012, 1, 30)
|
| 386 |
+
assert dt + 2 * bm_offset == datetime(2012, 2, 27)
|
| 387 |
+
|
| 388 |
+
    # The ``offset`` timedelta is applied after rolling to the custom business
    # month end, so every expected value is "month end ± timedelta".
    @pytest.mark.parametrize(
        "case",
        [
            (
                CBMonthEnd(n=1, offset=timedelta(days=5)),
                {
                    datetime(2021, 3, 1): datetime(2021, 3, 31) + timedelta(days=5),
                    datetime(2021, 4, 17): datetime(2021, 4, 30) + timedelta(days=5),
                },
            ),
            (
                CBMonthEnd(n=2, offset=timedelta(days=40)),
                {
                    datetime(2021, 3, 10): datetime(2021, 4, 30) + timedelta(days=40),
                    datetime(2021, 4, 30): datetime(2021, 6, 30) + timedelta(days=40),
                },
            ),
            (
                CBMonthEnd(n=1, offset=timedelta(days=-5)),
                {
                    datetime(2021, 3, 1): datetime(2021, 3, 31) - timedelta(days=5),
                    datetime(2021, 4, 11): datetime(2021, 4, 30) - timedelta(days=5),
                },
            ),
            (
                -2 * CBMonthEnd(n=1, offset=timedelta(days=10)),
                {
                    datetime(2021, 3, 1): datetime(2021, 1, 29) + timedelta(days=10),
                    datetime(2021, 4, 3): datetime(2021, 2, 26) + timedelta(days=10),
                },
            ),
            (
                CBMonthEnd(n=0, offset=timedelta(days=1)),
                {
                    datetime(2021, 3, 2): datetime(2021, 3, 31) + timedelta(days=1),
                    datetime(2021, 4, 1): datetime(2021, 4, 30) + timedelta(days=1),
                },
            ),
            (
                CBMonthEnd(n=1, holidays=["2021-03-31"], offset=timedelta(days=1)),
                {
                    datetime(2021, 3, 2): datetime(2021, 3, 30) + timedelta(days=1),
                },
            ),
        ],
    )
    def test_apply_with_extra_offset(self, case):
        # Each case is (offset, {base: expected}).
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_dst.py
ADDED
|
@@ -0,0 +1,260 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for DateOffset additions over Daylight Savings Time
|
| 3 |
+
"""
|
| 4 |
+
from datetime import timedelta
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
import pytz
|
| 8 |
+
|
| 9 |
+
from pandas._libs.tslibs import Timestamp
|
| 10 |
+
from pandas._libs.tslibs.offsets import (
|
| 11 |
+
BMonthBegin,
|
| 12 |
+
BMonthEnd,
|
| 13 |
+
BQuarterBegin,
|
| 14 |
+
BQuarterEnd,
|
| 15 |
+
BYearBegin,
|
| 16 |
+
BYearEnd,
|
| 17 |
+
CBMonthBegin,
|
| 18 |
+
CBMonthEnd,
|
| 19 |
+
CustomBusinessDay,
|
| 20 |
+
DateOffset,
|
| 21 |
+
Day,
|
| 22 |
+
MonthBegin,
|
| 23 |
+
MonthEnd,
|
| 24 |
+
QuarterBegin,
|
| 25 |
+
QuarterEnd,
|
| 26 |
+
SemiMonthBegin,
|
| 27 |
+
SemiMonthEnd,
|
| 28 |
+
Week,
|
| 29 |
+
YearBegin,
|
| 30 |
+
YearEnd,
|
| 31 |
+
)
|
| 32 |
+
from pandas.errors import PerformanceWarning
|
| 33 |
+
|
| 34 |
+
from pandas import DatetimeIndex
|
| 35 |
+
import pandas._testing as tm
|
| 36 |
+
from pandas.util.version import Version
|
| 37 |
+
|
| 38 |
+
# error: Module has no attribute "__version__"
|
| 39 |
+
pytz_version = Version(pytz.__version__) # type: ignore[attr-defined]
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def get_utc_offset_hours(ts):
    """Return the total UTC offset of a tz-aware Timestamp, in hours."""
    delta = ts.utcoffset()
    total_seconds = delta.days * 24 * 3600 + delta.seconds
    return total_seconds / 3600.0
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class TestDST:
    """DateOffset addition across US daylight-savings transitions."""

    # one microsecond before the DST transition
    ts_pre_fallback = "2013-11-03 01:59:59.999999"
    ts_pre_springfwd = "2013-03-10 01:59:59.999999"

    # test both basic names and dateutil timezones
    timezone_utc_offsets = {
        "US/Eastern": {"utc_offset_daylight": -4, "utc_offset_standard": -5},
        "dateutil/US/Pacific": {"utc_offset_daylight": -7, "utc_offset_standard": -8},
    }
    # DateOffset kwargs that *replace* a datetime field (e.g. day=3) ...
    valid_date_offsets_singular = [
        "weekday",
        "day",
        "hour",
        "minute",
        "second",
        "microsecond",
    ]
    # ... versus kwargs that *shift* by an amount (e.g. days=3).
    valid_date_offsets_plural = [
        "weeks",
        "days",
        "hours",
        "minutes",
        "seconds",
        "milliseconds",
        "microseconds",
    ]
|
| 75 |
+
|
| 76 |
+
def _test_all_offsets(self, n, **kwds):
|
| 77 |
+
valid_offsets = (
|
| 78 |
+
self.valid_date_offsets_plural
|
| 79 |
+
if n > 1
|
| 80 |
+
else self.valid_date_offsets_singular
|
| 81 |
+
)
|
| 82 |
+
|
| 83 |
+
for name in valid_offsets:
|
| 84 |
+
self._test_offset(offset_name=name, offset_n=n, **kwds)
|
| 85 |
+
|
| 86 |
+
    def _test_offset(self, offset_name, offset_n, tstart, expected_utc_offset):
        # Apply DateOffset(**{offset_name: offset_n}) to tstart, checking the
        # resulting utc offset and which datetime fields are preserved.
        offset = DateOffset(**{offset_name: offset_n})

        if (
            offset_name in ["hour", "minute", "second", "microsecond"]
            and offset_n == 1
            and tstart == Timestamp("2013-11-03 01:59:59.999999-0500", tz="US/Eastern")
        ):
            # This addition results in an ambiguous wall time
            err_msg = {
                "hour": "2013-11-03 01:59:59.999999",
                "minute": "2013-11-03 01:01:59.999999",
                "second": "2013-11-03 01:59:01.999999",
                "microsecond": "2013-11-03 01:59:59.000001",
            }[offset_name]
            with pytest.raises(pytz.AmbiguousTimeError, match=err_msg):
                tstart + offset
            # While we're here, let's check that we get the same behavior in a
            # vectorized path
            dti = DatetimeIndex([tstart])
            warn_msg = "Non-vectorized DateOffset"
            with pytest.raises(pytz.AmbiguousTimeError, match=err_msg):
                with tm.assert_produces_warning(PerformanceWarning, match=warn_msg):
                    dti + offset
            return

        t = tstart + offset
        if expected_utc_offset is not None:
            assert get_utc_offset_hours(t) == expected_utc_offset

        if offset_name == "weeks":
            # dates should match
            assert t.date() == timedelta(days=7 * offset.kwds["weeks"]) + tstart.date()
            # expect the same day of week, hour of day, minute, second, ...
            assert (
                t.dayofweek == tstart.dayofweek
                and t.hour == tstart.hour
                and t.minute == tstart.minute
                and t.second == tstart.second
            )
        elif offset_name == "days":
            # dates should match
            assert timedelta(offset.kwds["days"]) + tstart.date() == t.date()
            # expect the same hour of day, minute, second, ...
            assert (
                t.hour == tstart.hour
                and t.minute == tstart.minute
                and t.second == tstart.second
            )
        elif offset_name in self.valid_date_offsets_singular:
            # expect the singular offset value to match between tstart and t
            datepart_offset = getattr(
                t, offset_name if offset_name != "weekday" else "dayofweek"
            )
            assert datepart_offset == offset.kwds[offset_name]
        else:
            # the offset should be the same as if it was done in UTC
            assert t == (tstart.tz_convert("UTC") + offset).tz_convert("US/Pacific")
|
| 144 |
+
|
| 145 |
+
def _make_timestamp(self, string, hrs_offset, tz):
|
| 146 |
+
if hrs_offset >= 0:
|
| 147 |
+
offset_string = f"{hrs_offset:02d}00"
|
| 148 |
+
else:
|
| 149 |
+
offset_string = f"-{(hrs_offset * -1):02}00"
|
| 150 |
+
return Timestamp(string + offset_string).tz_convert(tz)
|
| 151 |
+
|
| 152 |
+
def test_springforward_plural(self):
|
| 153 |
+
# test moving from standard to daylight savings
|
| 154 |
+
for tz, utc_offsets in self.timezone_utc_offsets.items():
|
| 155 |
+
hrs_pre = utc_offsets["utc_offset_standard"]
|
| 156 |
+
hrs_post = utc_offsets["utc_offset_daylight"]
|
| 157 |
+
self._test_all_offsets(
|
| 158 |
+
n=3,
|
| 159 |
+
tstart=self._make_timestamp(self.ts_pre_springfwd, hrs_pre, tz),
|
| 160 |
+
expected_utc_offset=hrs_post,
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
def test_fallback_singular(self):
|
| 164 |
+
# in the case of singular offsets, we don't necessarily know which utc
|
| 165 |
+
# offset the new Timestamp will wind up in (the tz for 1 month may be
|
| 166 |
+
# different from 1 second) so we don't specify an expected_utc_offset
|
| 167 |
+
for tz, utc_offsets in self.timezone_utc_offsets.items():
|
| 168 |
+
hrs_pre = utc_offsets["utc_offset_standard"]
|
| 169 |
+
self._test_all_offsets(
|
| 170 |
+
n=1,
|
| 171 |
+
tstart=self._make_timestamp(self.ts_pre_fallback, hrs_pre, tz),
|
| 172 |
+
expected_utc_offset=None,
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
def test_springforward_singular(self):
|
| 176 |
+
for tz, utc_offsets in self.timezone_utc_offsets.items():
|
| 177 |
+
hrs_pre = utc_offsets["utc_offset_standard"]
|
| 178 |
+
self._test_all_offsets(
|
| 179 |
+
n=1,
|
| 180 |
+
tstart=self._make_timestamp(self.ts_pre_springfwd, hrs_pre, tz),
|
| 181 |
+
expected_utc_offset=None,
|
| 182 |
+
)
|
| 183 |
+
|
| 184 |
+
    # Each offset class mapped to [start, expected] date strings: adding one
    # unit of the class to the tz-aware start must land on expected, across
    # the November 2012 US fallback transition.
    offset_classes = {
        MonthBegin: ["11/2/2012", "12/1/2012"],
        MonthEnd: ["11/2/2012", "11/30/2012"],
        BMonthBegin: ["11/2/2012", "12/3/2012"],
        BMonthEnd: ["11/2/2012", "11/30/2012"],
        CBMonthBegin: ["11/2/2012", "12/3/2012"],
        CBMonthEnd: ["11/2/2012", "11/30/2012"],
        SemiMonthBegin: ["11/2/2012", "11/15/2012"],
        SemiMonthEnd: ["11/2/2012", "11/15/2012"],
        Week: ["11/2/2012", "11/9/2012"],
        YearBegin: ["11/2/2012", "1/1/2013"],
        YearEnd: ["11/2/2012", "12/31/2012"],
        BYearBegin: ["11/2/2012", "1/1/2013"],
        BYearEnd: ["11/2/2012", "12/31/2012"],
        QuarterBegin: ["11/2/2012", "12/1/2012"],
        QuarterEnd: ["11/2/2012", "12/31/2012"],
        BQuarterBegin: ["11/2/2012", "12/3/2012"],
        BQuarterEnd: ["11/2/2012", "12/31/2012"],
        Day: ["11/4/2012", "11/4/2012 23:00"],
    }.items()
|
| 204 |
+
|
| 205 |
+
@pytest.mark.parametrize("tup", offset_classes)
|
| 206 |
+
def test_all_offset_classes(self, tup):
|
| 207 |
+
offset, test_values = tup
|
| 208 |
+
|
| 209 |
+
first = Timestamp(test_values[0], tz="US/Eastern") + offset()
|
| 210 |
+
second = Timestamp(test_values[1], tz="US/Eastern")
|
| 211 |
+
assert first == second
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
# Every target_dt below falls in a DST-ambiguous hour of its timezone.
@pytest.mark.parametrize(
    "original_dt, target_dt, offset, tz",
    [
        pytest.param(
            Timestamp("1900-01-01"),
            Timestamp("1905-07-01"),
            MonthBegin(66),
            "Africa/Lagos",
            marks=pytest.mark.xfail(
                pytz_version < Version("2020.5") or pytz_version == Version("2022.2"),
                reason="GH#41906: pytz utc transition dates changed",
            ),
        ),
        (
            Timestamp("2021-10-01 01:15"),
            Timestamp("2021-10-31 01:15"),
            MonthEnd(1),
            "Europe/London",
        ),
        (
            Timestamp("2010-12-05 02:59"),
            Timestamp("2010-10-31 02:59"),
            SemiMonthEnd(-3),
            "Europe/Paris",
        ),
        (
            Timestamp("2021-10-31 01:20"),
            Timestamp("2021-11-07 01:20"),
            CustomBusinessDay(2, weekmask="Sun Mon"),
            "US/Eastern",
        ),
        (
            Timestamp("2020-04-03 01:30"),
            Timestamp("2020-11-01 01:30"),
            YearBegin(1, month=11),
            "America/Chicago",
        ),
    ],
)
def test_nontick_offset_with_ambiguous_time_error(original_dt, target_dt, offset, tz):
    # .apply for non-Tick offsets throws AmbiguousTimeError when the target dt
    # is dst-ambiguous
    localized_dt = original_dt.tz_localize(tz)

    msg = f"Cannot infer dst time from {target_dt}, try using the 'ambiguous' argument"
    with pytest.raises(pytz.AmbiguousTimeError, match=msg):
        localized_dt + offset
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_easter.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the following offsets:
|
| 3 |
+
- Easter
|
| 4 |
+
"""
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from pandas.tests.tseries.offsets.common import assert_offset_equal
|
| 12 |
+
|
| 13 |
+
from pandas.tseries.offsets import Easter
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TestEaster:
    """Easter rolls to the next/previous Easter Sunday."""

    @pytest.mark.parametrize(
        "offset,date,expected",
        [
            # n=1 rolls forward to the next Easter — including when the start
            # date is Easter itself; negative n rolls back likewise.
            (Easter(), datetime(2010, 1, 1), datetime(2010, 4, 4)),
            (Easter(), datetime(2010, 4, 5), datetime(2011, 4, 24)),
            (Easter(2), datetime(2010, 1, 1), datetime(2011, 4, 24)),
            (Easter(), datetime(2010, 4, 4), datetime(2011, 4, 24)),
            (Easter(2), datetime(2010, 4, 4), datetime(2012, 4, 8)),
            (-Easter(), datetime(2011, 1, 1), datetime(2010, 4, 4)),
            (-Easter(), datetime(2010, 4, 5), datetime(2010, 4, 4)),
            (-Easter(2), datetime(2011, 1, 1), datetime(2009, 4, 12)),
            (-Easter(), datetime(2010, 4, 4), datetime(2009, 4, 12)),
            (-Easter(2), datetime(2010, 4, 4), datetime(2008, 3, 23)),
        ],
    )
    def test_offset(self, offset, date, expected):
        assert_offset_equal(offset, date, expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_fiscal.py
ADDED
|
@@ -0,0 +1,656 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for Fiscal Year and Fiscal Quarter offset classes
|
| 3 |
+
"""
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
|
| 6 |
+
from dateutil.relativedelta import relativedelta
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from pandas import Timestamp
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
from pandas.tests.tseries.offsets.common import (
|
| 12 |
+
WeekDay,
|
| 13 |
+
assert_is_on_offset,
|
| 14 |
+
assert_offset_equal,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
from pandas.tseries.offsets import (
|
| 18 |
+
FY5253,
|
| 19 |
+
FY5253Quarter,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def makeFY5253LastOfMonthQuarter(*args, **kwds):
    """Construct an FY5253Quarter using the "last" year-end variation."""
    offset = FY5253Quarter(*args, variation="last", **kwds)
    return offset
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def makeFY5253NearestEndMonthQuarter(*args, **kwds):
    """Construct an FY5253Quarter using the "nearest" year-end variation."""
    offset = FY5253Quarter(*args, variation="nearest", **kwds)
    return offset
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def makeFY5253NearestEndMonth(*args, **kwds):
    """Construct an FY5253 using the "nearest" year-end variation."""
    offset = FY5253(*args, variation="nearest", **kwds)
    return offset
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def makeFY5253LastOfMonth(*args, **kwds):
    """Construct an FY5253 using the "last" year-end variation."""
    offset = FY5253(*args, variation="last", **kwds)
    return offset
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def test_get_offset_name():
    # freqstr encodes the variation ("L"/"N"), fiscal-year month, weekday,
    # and which quarter carries the 53rd week.
    last_variant = FY5253Quarter(
        weekday=1, startingMonth=3, qtr_with_extra_week=4, variation="last"
    )
    assert last_variant.freqstr == "REQ-L-MAR-TUE-4"

    nearest_variant = FY5253Quarter(
        weekday=1, startingMonth=3, qtr_with_extra_week=3, variation="nearest"
    )
    assert nearest_variant.freqstr == "REQ-N-MAR-TUE-3"
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class TestFY5253LastOfMonth:
    """FY5253 "last" variation: fiscal year ends on the last <weekday> of <startingMonth>."""

    offset_lom_sat_aug = makeFY5253LastOfMonth(1, startingMonth=8, weekday=WeekDay.SAT)
    offset_lom_sat_sep = makeFY5253LastOfMonth(1, startingMonth=9, weekday=WeekDay.SAT)

    # (offset, date, expected_bool) triples.
    on_offset_cases = [
        # From Wikipedia (see:
        # https://en.wikipedia.org/wiki/4%E2%80%934%E2%80%935_calendar#Last_Saturday_of_the_month_at_fiscal_year_end)
        (offset_lom_sat_aug, datetime(2006, 8, 26), True),
        (offset_lom_sat_aug, datetime(2007, 8, 25), True),
        (offset_lom_sat_aug, datetime(2008, 8, 30), True),
        (offset_lom_sat_aug, datetime(2009, 8, 29), True),
        (offset_lom_sat_aug, datetime(2010, 8, 28), True),
        (offset_lom_sat_aug, datetime(2011, 8, 27), True),
        (offset_lom_sat_aug, datetime(2012, 8, 25), True),
        (offset_lom_sat_aug, datetime(2013, 8, 31), True),
        (offset_lom_sat_aug, datetime(2014, 8, 30), True),
        (offset_lom_sat_aug, datetime(2015, 8, 29), True),
        (offset_lom_sat_aug, datetime(2016, 8, 27), True),
        (offset_lom_sat_aug, datetime(2017, 8, 26), True),
        (offset_lom_sat_aug, datetime(2018, 8, 25), True),
        (offset_lom_sat_aug, datetime(2019, 8, 31), True),
        # One day after each year end is off-offset ...
        (offset_lom_sat_aug, datetime(2006, 8, 27), False),
        (offset_lom_sat_aug, datetime(2007, 8, 28), False),
        (offset_lom_sat_aug, datetime(2008, 8, 31), False),
        (offset_lom_sat_aug, datetime(2009, 8, 30), False),
        (offset_lom_sat_aug, datetime(2010, 8, 29), False),
        (offset_lom_sat_aug, datetime(2011, 8, 28), False),
        # ... as is one day before.
        (offset_lom_sat_aug, datetime(2006, 8, 25), False),
        (offset_lom_sat_aug, datetime(2007, 8, 24), False),
        (offset_lom_sat_aug, datetime(2008, 8, 29), False),
        (offset_lom_sat_aug, datetime(2009, 8, 28), False),
        (offset_lom_sat_aug, datetime(2010, 8, 27), False),
        (offset_lom_sat_aug, datetime(2011, 8, 26), False),
        (offset_lom_sat_aug, datetime(2019, 8, 30), False),
        # From GMCR (see for example:
        # http://yahoo.brand.edgar-online.com/Default.aspx?
        # companyid=3184&formtypeID=7)
        (offset_lom_sat_sep, datetime(2010, 9, 25), True),
        (offset_lom_sat_sep, datetime(2011, 9, 24), True),
        (offset_lom_sat_sep, datetime(2012, 9, 29), True),
    ]

    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)

    def test_apply(self):
        # Repeated addition of the offset must walk the known sequence of
        # fiscal year ends (forwards, and backwards for n=-1).
        offset_lom_aug_sat = makeFY5253LastOfMonth(startingMonth=8, weekday=WeekDay.SAT)
        offset_lom_aug_sat_1 = makeFY5253LastOfMonth(
            n=1, startingMonth=8, weekday=WeekDay.SAT
        )

        date_seq_lom_aug_sat = [
            datetime(2006, 8, 26),
            datetime(2007, 8, 25),
            datetime(2008, 8, 30),
            datetime(2009, 8, 29),
            datetime(2010, 8, 28),
            datetime(2011, 8, 27),
            datetime(2012, 8, 25),
            datetime(2013, 8, 31),
            datetime(2014, 8, 30),
            datetime(2015, 8, 29),
            datetime(2016, 8, 27),
        ]

        tests = [
            (offset_lom_aug_sat, date_seq_lom_aug_sat),
            (offset_lom_aug_sat_1, date_seq_lom_aug_sat),
            (offset_lom_aug_sat, [datetime(2006, 8, 25)] + date_seq_lom_aug_sat),
            (offset_lom_aug_sat_1, [datetime(2006, 8, 27)] + date_seq_lom_aug_sat[1:]),
            (
                makeFY5253LastOfMonth(n=-1, startingMonth=8, weekday=WeekDay.SAT),
                list(reversed(date_seq_lom_aug_sat)),
            ),
        ]
        for test in tests:
            offset, data = test
            current = data[0]
            for datum in data[1:]:
                current = current + offset
                assert current == datum
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class TestFY5253NearestEndMonth:
|
| 140 |
+
def test_get_year_end(self):
|
| 141 |
+
assert makeFY5253NearestEndMonth(
|
| 142 |
+
startingMonth=8, weekday=WeekDay.SAT
|
| 143 |
+
).get_year_end(datetime(2013, 1, 1)) == datetime(2013, 8, 31)
|
| 144 |
+
assert makeFY5253NearestEndMonth(
|
| 145 |
+
startingMonth=8, weekday=WeekDay.SUN
|
| 146 |
+
).get_year_end(datetime(2013, 1, 1)) == datetime(2013, 9, 1)
|
| 147 |
+
assert makeFY5253NearestEndMonth(
|
| 148 |
+
startingMonth=8, weekday=WeekDay.FRI
|
| 149 |
+
).get_year_end(datetime(2013, 1, 1)) == datetime(2013, 8, 30)
|
| 150 |
+
|
| 151 |
+
offset_n = FY5253(weekday=WeekDay.TUE, startingMonth=12, variation="nearest")
|
| 152 |
+
assert offset_n.get_year_end(datetime(2012, 1, 1)) == datetime(2013, 1, 1)
|
| 153 |
+
assert offset_n.get_year_end(datetime(2012, 1, 10)) == datetime(2013, 1, 1)
|
| 154 |
+
|
| 155 |
+
assert offset_n.get_year_end(datetime(2013, 1, 1)) == datetime(2013, 12, 31)
|
| 156 |
+
assert offset_n.get_year_end(datetime(2013, 1, 2)) == datetime(2013, 12, 31)
|
| 157 |
+
assert offset_n.get_year_end(datetime(2013, 1, 3)) == datetime(2013, 12, 31)
|
| 158 |
+
assert offset_n.get_year_end(datetime(2013, 1, 10)) == datetime(2013, 12, 31)
|
| 159 |
+
|
| 160 |
+
JNJ = FY5253(n=1, startingMonth=12, weekday=6, variation="nearest")
|
| 161 |
+
assert JNJ.get_year_end(datetime(2006, 1, 1)) == datetime(2006, 12, 31)
|
| 162 |
+
|
| 163 |
+
offset_lom_aug_sat = makeFY5253NearestEndMonth(
|
| 164 |
+
1, startingMonth=8, weekday=WeekDay.SAT
|
| 165 |
+
)
|
| 166 |
+
offset_lom_aug_thu = makeFY5253NearestEndMonth(
|
| 167 |
+
1, startingMonth=8, weekday=WeekDay.THU
|
| 168 |
+
)
|
| 169 |
+
offset_n = FY5253(weekday=WeekDay.TUE, startingMonth=12, variation="nearest")
|
| 170 |
+
|
| 171 |
+
on_offset_cases = [
|
| 172 |
+
# From Wikipedia (see:
|
| 173 |
+
# https://en.wikipedia.org/wiki/4%E2%80%934%E2%80%935_calendar
|
| 174 |
+
# #Saturday_nearest_the_end_of_month)
|
| 175 |
+
# 2006-09-02 2006 September 2
|
| 176 |
+
# 2007-09-01 2007 September 1
|
| 177 |
+
# 2008-08-30 2008 August 30 (leap year)
|
| 178 |
+
# 2009-08-29 2009 August 29
|
| 179 |
+
# 2010-08-28 2010 August 28
|
| 180 |
+
# 2011-09-03 2011 September 3
|
| 181 |
+
# 2012-09-01 2012 September 1 (leap year)
|
| 182 |
+
# 2013-08-31 2013 August 31
|
| 183 |
+
# 2014-08-30 2014 August 30
|
| 184 |
+
# 2015-08-29 2015 August 29
|
| 185 |
+
# 2016-09-03 2016 September 3 (leap year)
|
| 186 |
+
# 2017-09-02 2017 September 2
|
| 187 |
+
# 2018-09-01 2018 September 1
|
| 188 |
+
# 2019-08-31 2019 August 31
|
| 189 |
+
(offset_lom_aug_sat, datetime(2006, 9, 2), True),
|
| 190 |
+
(offset_lom_aug_sat, datetime(2007, 9, 1), True),
|
| 191 |
+
(offset_lom_aug_sat, datetime(2008, 8, 30), True),
|
| 192 |
+
(offset_lom_aug_sat, datetime(2009, 8, 29), True),
|
| 193 |
+
(offset_lom_aug_sat, datetime(2010, 8, 28), True),
|
| 194 |
+
(offset_lom_aug_sat, datetime(2011, 9, 3), True),
|
| 195 |
+
(offset_lom_aug_sat, datetime(2016, 9, 3), True),
|
| 196 |
+
(offset_lom_aug_sat, datetime(2017, 9, 2), True),
|
| 197 |
+
(offset_lom_aug_sat, datetime(2018, 9, 1), True),
|
| 198 |
+
(offset_lom_aug_sat, datetime(2019, 8, 31), True),
|
| 199 |
+
(offset_lom_aug_sat, datetime(2006, 8, 27), False),
|
| 200 |
+
(offset_lom_aug_sat, datetime(2007, 8, 28), False),
|
| 201 |
+
(offset_lom_aug_sat, datetime(2008, 8, 31), False),
|
| 202 |
+
(offset_lom_aug_sat, datetime(2009, 8, 30), False),
|
| 203 |
+
(offset_lom_aug_sat, datetime(2010, 8, 29), False),
|
| 204 |
+
(offset_lom_aug_sat, datetime(2011, 8, 28), False),
|
| 205 |
+
(offset_lom_aug_sat, datetime(2006, 8, 25), False),
|
| 206 |
+
(offset_lom_aug_sat, datetime(2007, 8, 24), False),
|
| 207 |
+
(offset_lom_aug_sat, datetime(2008, 8, 29), False),
|
| 208 |
+
(offset_lom_aug_sat, datetime(2009, 8, 28), False),
|
| 209 |
+
(offset_lom_aug_sat, datetime(2010, 8, 27), False),
|
| 210 |
+
(offset_lom_aug_sat, datetime(2011, 8, 26), False),
|
| 211 |
+
(offset_lom_aug_sat, datetime(2019, 8, 30), False),
|
| 212 |
+
# From Micron, see:
|
| 213 |
+
# http://google.brand.edgar-online.com/?sym=MU&formtypeID=7
|
| 214 |
+
(offset_lom_aug_thu, datetime(2012, 8, 30), True),
|
| 215 |
+
(offset_lom_aug_thu, datetime(2011, 9, 1), True),
|
| 216 |
+
(offset_n, datetime(2012, 12, 31), False),
|
| 217 |
+
(offset_n, datetime(2013, 1, 1), True),
|
| 218 |
+
(offset_n, datetime(2013, 1, 2), False),
|
| 219 |
+
]
|
| 220 |
+
|
| 221 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 222 |
+
def test_is_on_offset(self, case):
|
| 223 |
+
offset, dt, expected = case
|
| 224 |
+
assert_is_on_offset(offset, dt, expected)
|
| 225 |
+
|
| 226 |
+
def test_apply(self):
|
| 227 |
+
date_seq_nem_8_sat = [
|
| 228 |
+
datetime(2006, 9, 2),
|
| 229 |
+
datetime(2007, 9, 1),
|
| 230 |
+
datetime(2008, 8, 30),
|
| 231 |
+
datetime(2009, 8, 29),
|
| 232 |
+
datetime(2010, 8, 28),
|
| 233 |
+
datetime(2011, 9, 3),
|
| 234 |
+
]
|
| 235 |
+
|
| 236 |
+
JNJ = [
|
| 237 |
+
datetime(2005, 1, 2),
|
| 238 |
+
datetime(2006, 1, 1),
|
| 239 |
+
datetime(2006, 12, 31),
|
| 240 |
+
datetime(2007, 12, 30),
|
| 241 |
+
datetime(2008, 12, 28),
|
| 242 |
+
datetime(2010, 1, 3),
|
| 243 |
+
datetime(2011, 1, 2),
|
| 244 |
+
datetime(2012, 1, 1),
|
| 245 |
+
datetime(2012, 12, 30),
|
| 246 |
+
]
|
| 247 |
+
|
| 248 |
+
DEC_SAT = FY5253(n=-1, startingMonth=12, weekday=5, variation="nearest")
|
| 249 |
+
|
| 250 |
+
tests = [
|
| 251 |
+
(
|
| 252 |
+
makeFY5253NearestEndMonth(startingMonth=8, weekday=WeekDay.SAT),
|
| 253 |
+
date_seq_nem_8_sat,
|
| 254 |
+
),
|
| 255 |
+
(
|
| 256 |
+
makeFY5253NearestEndMonth(n=1, startingMonth=8, weekday=WeekDay.SAT),
|
| 257 |
+
date_seq_nem_8_sat,
|
| 258 |
+
),
|
| 259 |
+
(
|
| 260 |
+
makeFY5253NearestEndMonth(startingMonth=8, weekday=WeekDay.SAT),
|
| 261 |
+
[datetime(2006, 9, 1)] + date_seq_nem_8_sat,
|
| 262 |
+
),
|
| 263 |
+
(
|
| 264 |
+
makeFY5253NearestEndMonth(n=1, startingMonth=8, weekday=WeekDay.SAT),
|
| 265 |
+
[datetime(2006, 9, 3)] + date_seq_nem_8_sat[1:],
|
| 266 |
+
),
|
| 267 |
+
(
|
| 268 |
+
makeFY5253NearestEndMonth(n=-1, startingMonth=8, weekday=WeekDay.SAT),
|
| 269 |
+
list(reversed(date_seq_nem_8_sat)),
|
| 270 |
+
),
|
| 271 |
+
(
|
| 272 |
+
makeFY5253NearestEndMonth(n=1, startingMonth=12, weekday=WeekDay.SUN),
|
| 273 |
+
JNJ,
|
| 274 |
+
),
|
| 275 |
+
(
|
| 276 |
+
makeFY5253NearestEndMonth(n=-1, startingMonth=12, weekday=WeekDay.SUN),
|
| 277 |
+
list(reversed(JNJ)),
|
| 278 |
+
),
|
| 279 |
+
(
|
| 280 |
+
makeFY5253NearestEndMonth(n=1, startingMonth=12, weekday=WeekDay.SUN),
|
| 281 |
+
[datetime(2005, 1, 2), datetime(2006, 1, 1)],
|
| 282 |
+
),
|
| 283 |
+
(
|
| 284 |
+
makeFY5253NearestEndMonth(n=1, startingMonth=12, weekday=WeekDay.SUN),
|
| 285 |
+
[datetime(2006, 1, 2), datetime(2006, 12, 31)],
|
| 286 |
+
),
|
| 287 |
+
(DEC_SAT, [datetime(2013, 1, 15), datetime(2012, 12, 29)]),
|
| 288 |
+
]
|
| 289 |
+
for test in tests:
|
| 290 |
+
offset, data = test
|
| 291 |
+
current = data[0]
|
| 292 |
+
for datum in data[1:]:
|
| 293 |
+
current = current + offset
|
| 294 |
+
assert current == datum
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
class TestFY5253LastOfMonthQuarter:
|
| 298 |
+
def test_is_anchored(self):
|
| 299 |
+
msg = "FY5253Quarter.is_anchored is deprecated "
|
| 300 |
+
|
| 301 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 302 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 303 |
+
startingMonth=1, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 304 |
+
).is_anchored()
|
| 305 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 306 |
+
weekday=WeekDay.SAT, startingMonth=3, qtr_with_extra_week=4
|
| 307 |
+
).is_anchored()
|
| 308 |
+
assert not makeFY5253LastOfMonthQuarter(
|
| 309 |
+
2, startingMonth=1, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 310 |
+
).is_anchored()
|
| 311 |
+
|
| 312 |
+
def test_equality(self):
|
| 313 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 314 |
+
startingMonth=1, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 315 |
+
) == makeFY5253LastOfMonthQuarter(
|
| 316 |
+
startingMonth=1, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 317 |
+
)
|
| 318 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 319 |
+
startingMonth=1, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 320 |
+
) != makeFY5253LastOfMonthQuarter(
|
| 321 |
+
startingMonth=1, weekday=WeekDay.SUN, qtr_with_extra_week=4
|
| 322 |
+
)
|
| 323 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 324 |
+
startingMonth=1, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 325 |
+
) != makeFY5253LastOfMonthQuarter(
|
| 326 |
+
startingMonth=2, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 327 |
+
)
|
| 328 |
+
|
| 329 |
+
def test_offset(self):
|
| 330 |
+
offset = makeFY5253LastOfMonthQuarter(
|
| 331 |
+
1, startingMonth=9, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 332 |
+
)
|
| 333 |
+
offset2 = makeFY5253LastOfMonthQuarter(
|
| 334 |
+
2, startingMonth=9, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 335 |
+
)
|
| 336 |
+
offset4 = makeFY5253LastOfMonthQuarter(
|
| 337 |
+
4, startingMonth=9, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 338 |
+
)
|
| 339 |
+
|
| 340 |
+
offset_neg1 = makeFY5253LastOfMonthQuarter(
|
| 341 |
+
-1, startingMonth=9, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 342 |
+
)
|
| 343 |
+
offset_neg2 = makeFY5253LastOfMonthQuarter(
|
| 344 |
+
-2, startingMonth=9, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 345 |
+
)
|
| 346 |
+
|
| 347 |
+
GMCR = [
|
| 348 |
+
datetime(2010, 3, 27),
|
| 349 |
+
datetime(2010, 6, 26),
|
| 350 |
+
datetime(2010, 9, 25),
|
| 351 |
+
datetime(2010, 12, 25),
|
| 352 |
+
datetime(2011, 3, 26),
|
| 353 |
+
datetime(2011, 6, 25),
|
| 354 |
+
datetime(2011, 9, 24),
|
| 355 |
+
datetime(2011, 12, 24),
|
| 356 |
+
datetime(2012, 3, 24),
|
| 357 |
+
datetime(2012, 6, 23),
|
| 358 |
+
datetime(2012, 9, 29),
|
| 359 |
+
datetime(2012, 12, 29),
|
| 360 |
+
datetime(2013, 3, 30),
|
| 361 |
+
datetime(2013, 6, 29),
|
| 362 |
+
]
|
| 363 |
+
|
| 364 |
+
assert_offset_equal(offset, base=GMCR[0], expected=GMCR[1])
|
| 365 |
+
assert_offset_equal(
|
| 366 |
+
offset, base=GMCR[0] + relativedelta(days=-1), expected=GMCR[0]
|
| 367 |
+
)
|
| 368 |
+
assert_offset_equal(offset, base=GMCR[1], expected=GMCR[2])
|
| 369 |
+
|
| 370 |
+
assert_offset_equal(offset2, base=GMCR[0], expected=GMCR[2])
|
| 371 |
+
assert_offset_equal(offset4, base=GMCR[0], expected=GMCR[4])
|
| 372 |
+
|
| 373 |
+
assert_offset_equal(offset_neg1, base=GMCR[-1], expected=GMCR[-2])
|
| 374 |
+
assert_offset_equal(
|
| 375 |
+
offset_neg1, base=GMCR[-1] + relativedelta(days=+1), expected=GMCR[-1]
|
| 376 |
+
)
|
| 377 |
+
assert_offset_equal(offset_neg2, base=GMCR[-1], expected=GMCR[-3])
|
| 378 |
+
|
| 379 |
+
date = GMCR[0] + relativedelta(days=-1)
|
| 380 |
+
for expected in GMCR:
|
| 381 |
+
assert_offset_equal(offset, date, expected)
|
| 382 |
+
date = date + offset
|
| 383 |
+
|
| 384 |
+
date = GMCR[-1] + relativedelta(days=+1)
|
| 385 |
+
for expected in reversed(GMCR):
|
| 386 |
+
assert_offset_equal(offset_neg1, date, expected)
|
| 387 |
+
date = date + offset_neg1
|
| 388 |
+
|
| 389 |
+
lomq_aug_sat_4 = makeFY5253LastOfMonthQuarter(
|
| 390 |
+
1, startingMonth=8, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 391 |
+
)
|
| 392 |
+
lomq_sep_sat_4 = makeFY5253LastOfMonthQuarter(
|
| 393 |
+
1, startingMonth=9, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 394 |
+
)
|
| 395 |
+
|
| 396 |
+
on_offset_cases = [
|
| 397 |
+
# From Wikipedia
|
| 398 |
+
(lomq_aug_sat_4, datetime(2006, 8, 26), True),
|
| 399 |
+
(lomq_aug_sat_4, datetime(2007, 8, 25), True),
|
| 400 |
+
(lomq_aug_sat_4, datetime(2008, 8, 30), True),
|
| 401 |
+
(lomq_aug_sat_4, datetime(2009, 8, 29), True),
|
| 402 |
+
(lomq_aug_sat_4, datetime(2010, 8, 28), True),
|
| 403 |
+
(lomq_aug_sat_4, datetime(2011, 8, 27), True),
|
| 404 |
+
(lomq_aug_sat_4, datetime(2019, 8, 31), True),
|
| 405 |
+
(lomq_aug_sat_4, datetime(2006, 8, 27), False),
|
| 406 |
+
(lomq_aug_sat_4, datetime(2007, 8, 28), False),
|
| 407 |
+
(lomq_aug_sat_4, datetime(2008, 8, 31), False),
|
| 408 |
+
(lomq_aug_sat_4, datetime(2009, 8, 30), False),
|
| 409 |
+
(lomq_aug_sat_4, datetime(2010, 8, 29), False),
|
| 410 |
+
(lomq_aug_sat_4, datetime(2011, 8, 28), False),
|
| 411 |
+
(lomq_aug_sat_4, datetime(2006, 8, 25), False),
|
| 412 |
+
(lomq_aug_sat_4, datetime(2007, 8, 24), False),
|
| 413 |
+
(lomq_aug_sat_4, datetime(2008, 8, 29), False),
|
| 414 |
+
(lomq_aug_sat_4, datetime(2009, 8, 28), False),
|
| 415 |
+
(lomq_aug_sat_4, datetime(2010, 8, 27), False),
|
| 416 |
+
(lomq_aug_sat_4, datetime(2011, 8, 26), False),
|
| 417 |
+
(lomq_aug_sat_4, datetime(2019, 8, 30), False),
|
| 418 |
+
# From GMCR
|
| 419 |
+
(lomq_sep_sat_4, datetime(2010, 9, 25), True),
|
| 420 |
+
(lomq_sep_sat_4, datetime(2011, 9, 24), True),
|
| 421 |
+
(lomq_sep_sat_4, datetime(2012, 9, 29), True),
|
| 422 |
+
(lomq_sep_sat_4, datetime(2013, 6, 29), True),
|
| 423 |
+
(lomq_sep_sat_4, datetime(2012, 6, 23), True),
|
| 424 |
+
(lomq_sep_sat_4, datetime(2012, 6, 30), False),
|
| 425 |
+
(lomq_sep_sat_4, datetime(2013, 3, 30), True),
|
| 426 |
+
(lomq_sep_sat_4, datetime(2012, 3, 24), True),
|
| 427 |
+
(lomq_sep_sat_4, datetime(2012, 12, 29), True),
|
| 428 |
+
(lomq_sep_sat_4, datetime(2011, 12, 24), True),
|
| 429 |
+
# INTC (extra week in Q1)
|
| 430 |
+
# See: http://www.intc.com/releasedetail.cfm?ReleaseID=542844
|
| 431 |
+
(
|
| 432 |
+
makeFY5253LastOfMonthQuarter(
|
| 433 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 434 |
+
),
|
| 435 |
+
datetime(2011, 4, 2),
|
| 436 |
+
True,
|
| 437 |
+
),
|
| 438 |
+
# see: http://google.brand.edgar-online.com/?sym=INTC&formtypeID=7
|
| 439 |
+
(
|
| 440 |
+
makeFY5253LastOfMonthQuarter(
|
| 441 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 442 |
+
),
|
| 443 |
+
datetime(2012, 12, 29),
|
| 444 |
+
True,
|
| 445 |
+
),
|
| 446 |
+
(
|
| 447 |
+
makeFY5253LastOfMonthQuarter(
|
| 448 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 449 |
+
),
|
| 450 |
+
datetime(2011, 12, 31),
|
| 451 |
+
True,
|
| 452 |
+
),
|
| 453 |
+
(
|
| 454 |
+
makeFY5253LastOfMonthQuarter(
|
| 455 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 456 |
+
),
|
| 457 |
+
datetime(2010, 12, 25),
|
| 458 |
+
True,
|
| 459 |
+
),
|
| 460 |
+
]
|
| 461 |
+
|
| 462 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 463 |
+
def test_is_on_offset(self, case):
|
| 464 |
+
offset, dt, expected = case
|
| 465 |
+
assert_is_on_offset(offset, dt, expected)
|
| 466 |
+
|
| 467 |
+
def test_year_has_extra_week(self):
|
| 468 |
+
# End of long Q1
|
| 469 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 470 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 471 |
+
).year_has_extra_week(datetime(2011, 4, 2))
|
| 472 |
+
|
| 473 |
+
# Start of long Q1
|
| 474 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 475 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 476 |
+
).year_has_extra_week(datetime(2010, 12, 26))
|
| 477 |
+
|
| 478 |
+
# End of year before year with long Q1
|
| 479 |
+
assert not makeFY5253LastOfMonthQuarter(
|
| 480 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 481 |
+
).year_has_extra_week(datetime(2010, 12, 25))
|
| 482 |
+
|
| 483 |
+
for year in [
|
| 484 |
+
x for x in range(1994, 2011 + 1) if x not in [2011, 2005, 2000, 1994]
|
| 485 |
+
]:
|
| 486 |
+
assert not makeFY5253LastOfMonthQuarter(
|
| 487 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 488 |
+
).year_has_extra_week(datetime(year, 4, 2))
|
| 489 |
+
|
| 490 |
+
# Other long years
|
| 491 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 492 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 493 |
+
).year_has_extra_week(datetime(2005, 4, 2))
|
| 494 |
+
|
| 495 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 496 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 497 |
+
).year_has_extra_week(datetime(2000, 4, 2))
|
| 498 |
+
|
| 499 |
+
assert makeFY5253LastOfMonthQuarter(
|
| 500 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 501 |
+
).year_has_extra_week(datetime(1994, 4, 2))
|
| 502 |
+
|
| 503 |
+
def test_get_weeks(self):
|
| 504 |
+
sat_dec_1 = makeFY5253LastOfMonthQuarter(
|
| 505 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=1
|
| 506 |
+
)
|
| 507 |
+
sat_dec_4 = makeFY5253LastOfMonthQuarter(
|
| 508 |
+
1, startingMonth=12, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 509 |
+
)
|
| 510 |
+
|
| 511 |
+
assert sat_dec_1.get_weeks(datetime(2011, 4, 2)) == [14, 13, 13, 13]
|
| 512 |
+
assert sat_dec_4.get_weeks(datetime(2011, 4, 2)) == [13, 13, 13, 14]
|
| 513 |
+
assert sat_dec_1.get_weeks(datetime(2010, 12, 25)) == [13, 13, 13, 13]
|
| 514 |
+
|
| 515 |
+
|
| 516 |
+
class TestFY5253NearestEndMonthQuarter:
|
| 517 |
+
offset_nem_sat_aug_4 = makeFY5253NearestEndMonthQuarter(
|
| 518 |
+
1, startingMonth=8, weekday=WeekDay.SAT, qtr_with_extra_week=4
|
| 519 |
+
)
|
| 520 |
+
offset_nem_thu_aug_4 = makeFY5253NearestEndMonthQuarter(
|
| 521 |
+
1, startingMonth=8, weekday=WeekDay.THU, qtr_with_extra_week=4
|
| 522 |
+
)
|
| 523 |
+
offset_n = FY5253(weekday=WeekDay.TUE, startingMonth=12, variation="nearest")
|
| 524 |
+
|
| 525 |
+
on_offset_cases = [
|
| 526 |
+
# From Wikipedia
|
| 527 |
+
(offset_nem_sat_aug_4, datetime(2006, 9, 2), True),
|
| 528 |
+
(offset_nem_sat_aug_4, datetime(2007, 9, 1), True),
|
| 529 |
+
(offset_nem_sat_aug_4, datetime(2008, 8, 30), True),
|
| 530 |
+
(offset_nem_sat_aug_4, datetime(2009, 8, 29), True),
|
| 531 |
+
(offset_nem_sat_aug_4, datetime(2010, 8, 28), True),
|
| 532 |
+
(offset_nem_sat_aug_4, datetime(2011, 9, 3), True),
|
| 533 |
+
(offset_nem_sat_aug_4, datetime(2016, 9, 3), True),
|
| 534 |
+
(offset_nem_sat_aug_4, datetime(2017, 9, 2), True),
|
| 535 |
+
(offset_nem_sat_aug_4, datetime(2018, 9, 1), True),
|
| 536 |
+
(offset_nem_sat_aug_4, datetime(2019, 8, 31), True),
|
| 537 |
+
(offset_nem_sat_aug_4, datetime(2006, 8, 27), False),
|
| 538 |
+
(offset_nem_sat_aug_4, datetime(2007, 8, 28), False),
|
| 539 |
+
(offset_nem_sat_aug_4, datetime(2008, 8, 31), False),
|
| 540 |
+
(offset_nem_sat_aug_4, datetime(2009, 8, 30), False),
|
| 541 |
+
(offset_nem_sat_aug_4, datetime(2010, 8, 29), False),
|
| 542 |
+
(offset_nem_sat_aug_4, datetime(2011, 8, 28), False),
|
| 543 |
+
(offset_nem_sat_aug_4, datetime(2006, 8, 25), False),
|
| 544 |
+
(offset_nem_sat_aug_4, datetime(2007, 8, 24), False),
|
| 545 |
+
(offset_nem_sat_aug_4, datetime(2008, 8, 29), False),
|
| 546 |
+
(offset_nem_sat_aug_4, datetime(2009, 8, 28), False),
|
| 547 |
+
(offset_nem_sat_aug_4, datetime(2010, 8, 27), False),
|
| 548 |
+
(offset_nem_sat_aug_4, datetime(2011, 8, 26), False),
|
| 549 |
+
(offset_nem_sat_aug_4, datetime(2019, 8, 30), False),
|
| 550 |
+
# From Micron, see:
|
| 551 |
+
# http://google.brand.edgar-online.com/?sym=MU&formtypeID=7
|
| 552 |
+
(offset_nem_thu_aug_4, datetime(2012, 8, 30), True),
|
| 553 |
+
(offset_nem_thu_aug_4, datetime(2011, 9, 1), True),
|
| 554 |
+
# See: http://google.brand.edgar-online.com/?sym=MU&formtypeID=13
|
| 555 |
+
(offset_nem_thu_aug_4, datetime(2013, 5, 30), True),
|
| 556 |
+
(offset_nem_thu_aug_4, datetime(2013, 2, 28), True),
|
| 557 |
+
(offset_nem_thu_aug_4, datetime(2012, 11, 29), True),
|
| 558 |
+
(offset_nem_thu_aug_4, datetime(2012, 5, 31), True),
|
| 559 |
+
(offset_nem_thu_aug_4, datetime(2007, 3, 1), True),
|
| 560 |
+
(offset_nem_thu_aug_4, datetime(1994, 3, 3), True),
|
| 561 |
+
(offset_n, datetime(2012, 12, 31), False),
|
| 562 |
+
(offset_n, datetime(2013, 1, 1), True),
|
| 563 |
+
(offset_n, datetime(2013, 1, 2), False),
|
| 564 |
+
]
|
| 565 |
+
|
| 566 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 567 |
+
def test_is_on_offset(self, case):
|
| 568 |
+
offset, dt, expected = case
|
| 569 |
+
assert_is_on_offset(offset, dt, expected)
|
| 570 |
+
|
| 571 |
+
def test_offset(self):
|
| 572 |
+
offset = makeFY5253NearestEndMonthQuarter(
|
| 573 |
+
1, startingMonth=8, weekday=WeekDay.THU, qtr_with_extra_week=4
|
| 574 |
+
)
|
| 575 |
+
|
| 576 |
+
MU = [
|
| 577 |
+
datetime(2012, 5, 31),
|
| 578 |
+
datetime(2012, 8, 30),
|
| 579 |
+
datetime(2012, 11, 29),
|
| 580 |
+
datetime(2013, 2, 28),
|
| 581 |
+
datetime(2013, 5, 30),
|
| 582 |
+
]
|
| 583 |
+
|
| 584 |
+
date = MU[0] + relativedelta(days=-1)
|
| 585 |
+
for expected in MU:
|
| 586 |
+
assert_offset_equal(offset, date, expected)
|
| 587 |
+
date = date + offset
|
| 588 |
+
|
| 589 |
+
assert_offset_equal(offset, datetime(2012, 5, 31), datetime(2012, 8, 30))
|
| 590 |
+
assert_offset_equal(offset, datetime(2012, 5, 30), datetime(2012, 5, 31))
|
| 591 |
+
|
| 592 |
+
offset2 = FY5253Quarter(
|
| 593 |
+
weekday=5, startingMonth=12, variation="last", qtr_with_extra_week=4
|
| 594 |
+
)
|
| 595 |
+
|
| 596 |
+
assert_offset_equal(offset2, datetime(2013, 1, 15), datetime(2013, 3, 30))
|
| 597 |
+
|
| 598 |
+
|
| 599 |
+
def test_bunched_yearends():
|
| 600 |
+
# GH#14774 cases with two fiscal year-ends in the same calendar-year
|
| 601 |
+
fy = FY5253(n=1, weekday=5, startingMonth=12, variation="nearest")
|
| 602 |
+
dt = Timestamp("2004-01-01")
|
| 603 |
+
assert fy.rollback(dt) == Timestamp("2002-12-28")
|
| 604 |
+
assert (-fy)._apply(dt) == Timestamp("2002-12-28")
|
| 605 |
+
assert dt - fy == Timestamp("2002-12-28")
|
| 606 |
+
|
| 607 |
+
assert fy.rollforward(dt) == Timestamp("2004-01-03")
|
| 608 |
+
assert fy._apply(dt) == Timestamp("2004-01-03")
|
| 609 |
+
assert fy + dt == Timestamp("2004-01-03")
|
| 610 |
+
assert dt + fy == Timestamp("2004-01-03")
|
| 611 |
+
|
| 612 |
+
# Same thing, but starting from a Timestamp in the previous year.
|
| 613 |
+
dt = Timestamp("2003-12-31")
|
| 614 |
+
assert fy.rollback(dt) == Timestamp("2002-12-28")
|
| 615 |
+
assert (-fy)._apply(dt) == Timestamp("2002-12-28")
|
| 616 |
+
assert dt - fy == Timestamp("2002-12-28")
|
| 617 |
+
|
| 618 |
+
|
| 619 |
+
def test_fy5253_last_onoffset():
|
| 620 |
+
# GH#18877 dates on the year-end but not normalized to midnight
|
| 621 |
+
offset = FY5253(n=-5, startingMonth=5, variation="last", weekday=0)
|
| 622 |
+
ts = Timestamp("1984-05-28 06:29:43.955911354+0200", tz="Europe/San_Marino")
|
| 623 |
+
fast = offset.is_on_offset(ts)
|
| 624 |
+
slow = (ts + offset) - offset == ts
|
| 625 |
+
assert fast == slow
|
| 626 |
+
|
| 627 |
+
|
| 628 |
+
def test_fy5253_nearest_onoffset():
|
| 629 |
+
# GH#18877 dates on the year-end but not normalized to midnight
|
| 630 |
+
offset = FY5253(n=3, startingMonth=7, variation="nearest", weekday=2)
|
| 631 |
+
ts = Timestamp("2032-07-28 00:12:59.035729419+0000", tz="Africa/Dakar")
|
| 632 |
+
fast = offset.is_on_offset(ts)
|
| 633 |
+
slow = (ts + offset) - offset == ts
|
| 634 |
+
assert fast == slow
|
| 635 |
+
|
| 636 |
+
|
| 637 |
+
def test_fy5253qtr_onoffset_nearest():
|
| 638 |
+
# GH#19036
|
| 639 |
+
ts = Timestamp("1985-09-02 23:57:46.232550356-0300", tz="Atlantic/Bermuda")
|
| 640 |
+
offset = FY5253Quarter(
|
| 641 |
+
n=3, qtr_with_extra_week=1, startingMonth=2, variation="nearest", weekday=0
|
| 642 |
+
)
|
| 643 |
+
fast = offset.is_on_offset(ts)
|
| 644 |
+
slow = (ts + offset) - offset == ts
|
| 645 |
+
assert fast == slow
|
| 646 |
+
|
| 647 |
+
|
| 648 |
+
def test_fy5253qtr_onoffset_last():
|
| 649 |
+
# GH#19036
|
| 650 |
+
offset = FY5253Quarter(
|
| 651 |
+
n=-2, qtr_with_extra_week=1, startingMonth=7, variation="last", weekday=2
|
| 652 |
+
)
|
| 653 |
+
ts = Timestamp("2011-01-26 19:03:40.331096129+0200", tz="Africa/Windhoek")
|
| 654 |
+
slow = (ts + offset) - offset == ts
|
| 655 |
+
fast = offset.is_on_offset(ts)
|
| 656 |
+
assert fast == slow
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_index.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for offset behavior with indices.
|
| 3 |
+
"""
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
from pandas import (
|
| 7 |
+
Series,
|
| 8 |
+
date_range,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
from pandas.tseries.offsets import (
|
| 12 |
+
BMonthBegin,
|
| 13 |
+
BMonthEnd,
|
| 14 |
+
BQuarterBegin,
|
| 15 |
+
BQuarterEnd,
|
| 16 |
+
BYearBegin,
|
| 17 |
+
BYearEnd,
|
| 18 |
+
MonthBegin,
|
| 19 |
+
MonthEnd,
|
| 20 |
+
QuarterBegin,
|
| 21 |
+
QuarterEnd,
|
| 22 |
+
YearBegin,
|
| 23 |
+
YearEnd,
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@pytest.mark.parametrize("n", [-2, 1])
|
| 28 |
+
@pytest.mark.parametrize(
|
| 29 |
+
"cls",
|
| 30 |
+
[
|
| 31 |
+
MonthBegin,
|
| 32 |
+
MonthEnd,
|
| 33 |
+
BMonthBegin,
|
| 34 |
+
BMonthEnd,
|
| 35 |
+
QuarterBegin,
|
| 36 |
+
QuarterEnd,
|
| 37 |
+
BQuarterBegin,
|
| 38 |
+
BQuarterEnd,
|
| 39 |
+
YearBegin,
|
| 40 |
+
YearEnd,
|
| 41 |
+
BYearBegin,
|
| 42 |
+
BYearEnd,
|
| 43 |
+
],
|
| 44 |
+
)
|
| 45 |
+
def test_apply_index(cls, n):
|
| 46 |
+
offset = cls(n=n)
|
| 47 |
+
rng = date_range(start="1/1/2000", periods=100000, freq="min")
|
| 48 |
+
ser = Series(rng)
|
| 49 |
+
|
| 50 |
+
res = rng + offset
|
| 51 |
+
assert res.freq is None # not retained
|
| 52 |
+
assert res[0] == rng[0] + offset
|
| 53 |
+
assert res[-1] == rng[-1] + offset
|
| 54 |
+
res2 = ser + offset
|
| 55 |
+
# apply_index is only for indexes, not series, so no res2_v2
|
| 56 |
+
assert res2.iloc[0] == ser.iloc[0] + offset
|
| 57 |
+
assert res2.iloc[-1] == ser.iloc[-1] + offset
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_month.py
ADDED
|
@@ -0,0 +1,666 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the following offsets:
|
| 3 |
+
- SemiMonthBegin
|
| 4 |
+
- SemiMonthEnd
|
| 5 |
+
- MonthBegin
|
| 6 |
+
- MonthEnd
|
| 7 |
+
"""
|
| 8 |
+
from __future__ import annotations
|
| 9 |
+
|
| 10 |
+
from datetime import datetime
|
| 11 |
+
|
| 12 |
+
import pytest
|
| 13 |
+
|
| 14 |
+
from pandas._libs.tslibs import Timestamp
|
| 15 |
+
from pandas._libs.tslibs.offsets import (
|
| 16 |
+
MonthBegin,
|
| 17 |
+
MonthEnd,
|
| 18 |
+
SemiMonthBegin,
|
| 19 |
+
SemiMonthEnd,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
from pandas import (
|
| 23 |
+
DatetimeIndex,
|
| 24 |
+
Series,
|
| 25 |
+
_testing as tm,
|
| 26 |
+
)
|
| 27 |
+
from pandas.tests.tseries.offsets.common import (
|
| 28 |
+
assert_is_on_offset,
|
| 29 |
+
assert_offset_equal,
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class TestSemiMonthEnd:
|
| 34 |
+
def test_offset_whole_year(self):
|
| 35 |
+
dates = (
|
| 36 |
+
datetime(2007, 12, 31),
|
| 37 |
+
datetime(2008, 1, 15),
|
| 38 |
+
datetime(2008, 1, 31),
|
| 39 |
+
datetime(2008, 2, 15),
|
| 40 |
+
datetime(2008, 2, 29),
|
| 41 |
+
datetime(2008, 3, 15),
|
| 42 |
+
datetime(2008, 3, 31),
|
| 43 |
+
datetime(2008, 4, 15),
|
| 44 |
+
datetime(2008, 4, 30),
|
| 45 |
+
datetime(2008, 5, 15),
|
| 46 |
+
datetime(2008, 5, 31),
|
| 47 |
+
datetime(2008, 6, 15),
|
| 48 |
+
datetime(2008, 6, 30),
|
| 49 |
+
datetime(2008, 7, 15),
|
| 50 |
+
datetime(2008, 7, 31),
|
| 51 |
+
datetime(2008, 8, 15),
|
| 52 |
+
datetime(2008, 8, 31),
|
| 53 |
+
datetime(2008, 9, 15),
|
| 54 |
+
datetime(2008, 9, 30),
|
| 55 |
+
datetime(2008, 10, 15),
|
| 56 |
+
datetime(2008, 10, 31),
|
| 57 |
+
datetime(2008, 11, 15),
|
| 58 |
+
datetime(2008, 11, 30),
|
| 59 |
+
datetime(2008, 12, 15),
|
| 60 |
+
datetime(2008, 12, 31),
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
for base, exp_date in zip(dates[:-1], dates[1:]):
|
| 64 |
+
assert_offset_equal(SemiMonthEnd(), base, exp_date)
|
| 65 |
+
|
| 66 |
+
# ensure .apply_index works as expected
|
| 67 |
+
shift = DatetimeIndex(dates[:-1])
|
| 68 |
+
with tm.assert_produces_warning(None):
|
| 69 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 70 |
+
# an integer array to PeriodIndex
|
| 71 |
+
result = SemiMonthEnd() + shift
|
| 72 |
+
|
| 73 |
+
exp = DatetimeIndex(dates[1:])
|
| 74 |
+
tm.assert_index_equal(result, exp)
|
| 75 |
+
|
| 76 |
+
offset_cases = []
|
| 77 |
+
offset_cases.append(
|
| 78 |
+
(
|
| 79 |
+
SemiMonthEnd(),
|
| 80 |
+
{
|
| 81 |
+
datetime(2008, 1, 1): datetime(2008, 1, 15),
|
| 82 |
+
datetime(2008, 1, 15): datetime(2008, 1, 31),
|
| 83 |
+
datetime(2008, 1, 31): datetime(2008, 2, 15),
|
| 84 |
+
datetime(2006, 12, 14): datetime(2006, 12, 15),
|
| 85 |
+
datetime(2006, 12, 29): datetime(2006, 12, 31),
|
| 86 |
+
datetime(2006, 12, 31): datetime(2007, 1, 15),
|
| 87 |
+
datetime(2007, 1, 1): datetime(2007, 1, 15),
|
| 88 |
+
datetime(2006, 12, 1): datetime(2006, 12, 15),
|
| 89 |
+
datetime(2006, 12, 15): datetime(2006, 12, 31),
|
| 90 |
+
},
|
| 91 |
+
)
|
| 92 |
+
)
|
| 93 |
+
|
| 94 |
+
offset_cases.append(
|
| 95 |
+
(
|
| 96 |
+
SemiMonthEnd(day_of_month=20),
|
| 97 |
+
{
|
| 98 |
+
datetime(2008, 1, 1): datetime(2008, 1, 20),
|
| 99 |
+
datetime(2008, 1, 15): datetime(2008, 1, 20),
|
| 100 |
+
datetime(2008, 1, 21): datetime(2008, 1, 31),
|
| 101 |
+
datetime(2008, 1, 31): datetime(2008, 2, 20),
|
| 102 |
+
datetime(2006, 12, 14): datetime(2006, 12, 20),
|
| 103 |
+
datetime(2006, 12, 29): datetime(2006, 12, 31),
|
| 104 |
+
datetime(2006, 12, 31): datetime(2007, 1, 20),
|
| 105 |
+
datetime(2007, 1, 1): datetime(2007, 1, 20),
|
| 106 |
+
datetime(2006, 12, 1): datetime(2006, 12, 20),
|
| 107 |
+
datetime(2006, 12, 15): datetime(2006, 12, 20),
|
| 108 |
+
},
|
| 109 |
+
)
|
| 110 |
+
)
|
| 111 |
+
|
| 112 |
+
offset_cases.append(
|
| 113 |
+
(
|
| 114 |
+
SemiMonthEnd(0),
|
| 115 |
+
{
|
| 116 |
+
datetime(2008, 1, 1): datetime(2008, 1, 15),
|
| 117 |
+
datetime(2008, 1, 16): datetime(2008, 1, 31),
|
| 118 |
+
datetime(2008, 1, 15): datetime(2008, 1, 15),
|
| 119 |
+
datetime(2008, 1, 31): datetime(2008, 1, 31),
|
| 120 |
+
datetime(2006, 12, 29): datetime(2006, 12, 31),
|
| 121 |
+
datetime(2006, 12, 31): datetime(2006, 12, 31),
|
| 122 |
+
datetime(2007, 1, 1): datetime(2007, 1, 15),
|
| 123 |
+
},
|
| 124 |
+
)
|
| 125 |
+
)
|
| 126 |
+
|
| 127 |
+
offset_cases.append(
|
| 128 |
+
(
|
| 129 |
+
SemiMonthEnd(0, day_of_month=16),
|
| 130 |
+
{
|
| 131 |
+
datetime(2008, 1, 1): datetime(2008, 1, 16),
|
| 132 |
+
datetime(2008, 1, 16): datetime(2008, 1, 16),
|
| 133 |
+
datetime(2008, 1, 15): datetime(2008, 1, 16),
|
| 134 |
+
datetime(2008, 1, 31): datetime(2008, 1, 31),
|
| 135 |
+
datetime(2006, 12, 29): datetime(2006, 12, 31),
|
| 136 |
+
datetime(2006, 12, 31): datetime(2006, 12, 31),
|
| 137 |
+
datetime(2007, 1, 1): datetime(2007, 1, 16),
|
| 138 |
+
},
|
| 139 |
+
)
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
offset_cases.append(
|
| 143 |
+
(
|
| 144 |
+
SemiMonthEnd(2),
|
| 145 |
+
{
|
| 146 |
+
datetime(2008, 1, 1): datetime(2008, 1, 31),
|
| 147 |
+
datetime(2008, 1, 31): datetime(2008, 2, 29),
|
| 148 |
+
datetime(2006, 12, 29): datetime(2007, 1, 15),
|
| 149 |
+
datetime(2006, 12, 31): datetime(2007, 1, 31),
|
| 150 |
+
datetime(2007, 1, 1): datetime(2007, 1, 31),
|
| 151 |
+
datetime(2007, 1, 16): datetime(2007, 2, 15),
|
| 152 |
+
datetime(2006, 11, 1): datetime(2006, 11, 30),
|
| 153 |
+
},
|
| 154 |
+
)
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
offset_cases.append(
|
| 158 |
+
(
|
| 159 |
+
SemiMonthEnd(-1),
|
| 160 |
+
{
|
| 161 |
+
datetime(2007, 1, 1): datetime(2006, 12, 31),
|
| 162 |
+
datetime(2008, 6, 30): datetime(2008, 6, 15),
|
| 163 |
+
datetime(2008, 12, 31): datetime(2008, 12, 15),
|
| 164 |
+
datetime(2006, 12, 29): datetime(2006, 12, 15),
|
| 165 |
+
datetime(2006, 12, 30): datetime(2006, 12, 15),
|
| 166 |
+
datetime(2007, 1, 1): datetime(2006, 12, 31),
|
| 167 |
+
},
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
offset_cases.append(
|
| 172 |
+
(
|
| 173 |
+
SemiMonthEnd(-1, day_of_month=4),
|
| 174 |
+
{
|
| 175 |
+
datetime(2007, 1, 1): datetime(2006, 12, 31),
|
| 176 |
+
datetime(2007, 1, 4): datetime(2006, 12, 31),
|
| 177 |
+
datetime(2008, 6, 30): datetime(2008, 6, 4),
|
| 178 |
+
datetime(2008, 12, 31): datetime(2008, 12, 4),
|
| 179 |
+
datetime(2006, 12, 5): datetime(2006, 12, 4),
|
| 180 |
+
datetime(2006, 12, 30): datetime(2006, 12, 4),
|
| 181 |
+
datetime(2007, 1, 1): datetime(2006, 12, 31),
|
| 182 |
+
},
|
| 183 |
+
)
|
| 184 |
+
)
|
| 185 |
+
|
| 186 |
+
offset_cases.append(
|
| 187 |
+
(
|
| 188 |
+
SemiMonthEnd(-2),
|
| 189 |
+
{
|
| 190 |
+
datetime(2007, 1, 1): datetime(2006, 12, 15),
|
| 191 |
+
datetime(2008, 6, 30): datetime(2008, 5, 31),
|
| 192 |
+
datetime(2008, 3, 15): datetime(2008, 2, 15),
|
| 193 |
+
datetime(2008, 12, 31): datetime(2008, 11, 30),
|
| 194 |
+
datetime(2006, 12, 29): datetime(2006, 11, 30),
|
| 195 |
+
datetime(2006, 12, 14): datetime(2006, 11, 15),
|
| 196 |
+
datetime(2007, 1, 1): datetime(2006, 12, 15),
|
| 197 |
+
},
|
| 198 |
+
)
|
| 199 |
+
)
|
| 200 |
+
|
| 201 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 202 |
+
def test_offset(self, case):
|
| 203 |
+
offset, cases = case
|
| 204 |
+
for base, expected in cases.items():
|
| 205 |
+
assert_offset_equal(offset, base, expected)
|
| 206 |
+
|
| 207 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 208 |
+
def test_apply_index(self, case):
|
| 209 |
+
# https://github.com/pandas-dev/pandas/issues/34580
|
| 210 |
+
offset, cases = case
|
| 211 |
+
shift = DatetimeIndex(cases.keys())
|
| 212 |
+
exp = DatetimeIndex(cases.values())
|
| 213 |
+
|
| 214 |
+
with tm.assert_produces_warning(None):
|
| 215 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 216 |
+
# an integer array to PeriodIndex
|
| 217 |
+
result = offset + shift
|
| 218 |
+
tm.assert_index_equal(result, exp)
|
| 219 |
+
|
| 220 |
+
on_offset_cases = [
|
| 221 |
+
(datetime(2007, 12, 31), True),
|
| 222 |
+
(datetime(2007, 12, 15), True),
|
| 223 |
+
(datetime(2007, 12, 14), False),
|
| 224 |
+
(datetime(2007, 12, 1), False),
|
| 225 |
+
(datetime(2008, 2, 29), True),
|
| 226 |
+
]
|
| 227 |
+
|
| 228 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 229 |
+
def test_is_on_offset(self, case):
|
| 230 |
+
dt, expected = case
|
| 231 |
+
assert_is_on_offset(SemiMonthEnd(), dt, expected)
|
| 232 |
+
|
| 233 |
+
@pytest.mark.parametrize("klass", [Series, DatetimeIndex])
|
| 234 |
+
def test_vectorized_offset_addition(self, klass):
|
| 235 |
+
shift = klass(
|
| 236 |
+
[
|
| 237 |
+
Timestamp("2000-01-15 00:15:00", tz="US/Central"),
|
| 238 |
+
Timestamp("2000-02-15", tz="US/Central"),
|
| 239 |
+
],
|
| 240 |
+
name="a",
|
| 241 |
+
)
|
| 242 |
+
|
| 243 |
+
with tm.assert_produces_warning(None):
|
| 244 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 245 |
+
# an integer array to PeriodIndex
|
| 246 |
+
result = shift + SemiMonthEnd()
|
| 247 |
+
result2 = SemiMonthEnd() + shift
|
| 248 |
+
|
| 249 |
+
exp = klass(
|
| 250 |
+
[
|
| 251 |
+
Timestamp("2000-01-31 00:15:00", tz="US/Central"),
|
| 252 |
+
Timestamp("2000-02-29", tz="US/Central"),
|
| 253 |
+
],
|
| 254 |
+
name="a",
|
| 255 |
+
)
|
| 256 |
+
tm.assert_equal(result, exp)
|
| 257 |
+
tm.assert_equal(result2, exp)
|
| 258 |
+
|
| 259 |
+
shift = klass(
|
| 260 |
+
[
|
| 261 |
+
Timestamp("2000-01-01 00:15:00", tz="US/Central"),
|
| 262 |
+
Timestamp("2000-02-01", tz="US/Central"),
|
| 263 |
+
],
|
| 264 |
+
name="a",
|
| 265 |
+
)
|
| 266 |
+
|
| 267 |
+
with tm.assert_produces_warning(None):
|
| 268 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 269 |
+
# an integer array to PeriodIndex
|
| 270 |
+
result = shift + SemiMonthEnd()
|
| 271 |
+
result2 = SemiMonthEnd() + shift
|
| 272 |
+
|
| 273 |
+
exp = klass(
|
| 274 |
+
[
|
| 275 |
+
Timestamp("2000-01-15 00:15:00", tz="US/Central"),
|
| 276 |
+
Timestamp("2000-02-15", tz="US/Central"),
|
| 277 |
+
],
|
| 278 |
+
name="a",
|
| 279 |
+
)
|
| 280 |
+
tm.assert_equal(result, exp)
|
| 281 |
+
tm.assert_equal(result2, exp)
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
class TestSemiMonthBegin:
|
| 285 |
+
def test_offset_whole_year(self):
|
| 286 |
+
dates = (
|
| 287 |
+
datetime(2007, 12, 15),
|
| 288 |
+
datetime(2008, 1, 1),
|
| 289 |
+
datetime(2008, 1, 15),
|
| 290 |
+
datetime(2008, 2, 1),
|
| 291 |
+
datetime(2008, 2, 15),
|
| 292 |
+
datetime(2008, 3, 1),
|
| 293 |
+
datetime(2008, 3, 15),
|
| 294 |
+
datetime(2008, 4, 1),
|
| 295 |
+
datetime(2008, 4, 15),
|
| 296 |
+
datetime(2008, 5, 1),
|
| 297 |
+
datetime(2008, 5, 15),
|
| 298 |
+
datetime(2008, 6, 1),
|
| 299 |
+
datetime(2008, 6, 15),
|
| 300 |
+
datetime(2008, 7, 1),
|
| 301 |
+
datetime(2008, 7, 15),
|
| 302 |
+
datetime(2008, 8, 1),
|
| 303 |
+
datetime(2008, 8, 15),
|
| 304 |
+
datetime(2008, 9, 1),
|
| 305 |
+
datetime(2008, 9, 15),
|
| 306 |
+
datetime(2008, 10, 1),
|
| 307 |
+
datetime(2008, 10, 15),
|
| 308 |
+
datetime(2008, 11, 1),
|
| 309 |
+
datetime(2008, 11, 15),
|
| 310 |
+
datetime(2008, 12, 1),
|
| 311 |
+
datetime(2008, 12, 15),
|
| 312 |
+
)
|
| 313 |
+
|
| 314 |
+
for base, exp_date in zip(dates[:-1], dates[1:]):
|
| 315 |
+
assert_offset_equal(SemiMonthBegin(), base, exp_date)
|
| 316 |
+
|
| 317 |
+
# ensure .apply_index works as expected
|
| 318 |
+
shift = DatetimeIndex(dates[:-1])
|
| 319 |
+
with tm.assert_produces_warning(None):
|
| 320 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 321 |
+
# an integer array to PeriodIndex
|
| 322 |
+
result = SemiMonthBegin() + shift
|
| 323 |
+
|
| 324 |
+
exp = DatetimeIndex(dates[1:])
|
| 325 |
+
tm.assert_index_equal(result, exp)
|
| 326 |
+
|
| 327 |
+
offset_cases = [
|
| 328 |
+
(
|
| 329 |
+
SemiMonthBegin(),
|
| 330 |
+
{
|
| 331 |
+
datetime(2008, 1, 1): datetime(2008, 1, 15),
|
| 332 |
+
datetime(2008, 1, 15): datetime(2008, 2, 1),
|
| 333 |
+
datetime(2008, 1, 31): datetime(2008, 2, 1),
|
| 334 |
+
datetime(2006, 12, 14): datetime(2006, 12, 15),
|
| 335 |
+
datetime(2006, 12, 29): datetime(2007, 1, 1),
|
| 336 |
+
datetime(2006, 12, 31): datetime(2007, 1, 1),
|
| 337 |
+
datetime(2007, 1, 1): datetime(2007, 1, 15),
|
| 338 |
+
datetime(2006, 12, 1): datetime(2006, 12, 15),
|
| 339 |
+
datetime(2006, 12, 15): datetime(2007, 1, 1),
|
| 340 |
+
},
|
| 341 |
+
),
|
| 342 |
+
(
|
| 343 |
+
SemiMonthBegin(day_of_month=20),
|
| 344 |
+
{
|
| 345 |
+
datetime(2008, 1, 1): datetime(2008, 1, 20),
|
| 346 |
+
datetime(2008, 1, 15): datetime(2008, 1, 20),
|
| 347 |
+
datetime(2008, 1, 21): datetime(2008, 2, 1),
|
| 348 |
+
datetime(2008, 1, 31): datetime(2008, 2, 1),
|
| 349 |
+
datetime(2006, 12, 14): datetime(2006, 12, 20),
|
| 350 |
+
datetime(2006, 12, 29): datetime(2007, 1, 1),
|
| 351 |
+
datetime(2006, 12, 31): datetime(2007, 1, 1),
|
| 352 |
+
datetime(2007, 1, 1): datetime(2007, 1, 20),
|
| 353 |
+
datetime(2006, 12, 1): datetime(2006, 12, 20),
|
| 354 |
+
datetime(2006, 12, 15): datetime(2006, 12, 20),
|
| 355 |
+
},
|
| 356 |
+
),
|
| 357 |
+
(
|
| 358 |
+
SemiMonthBegin(0),
|
| 359 |
+
{
|
| 360 |
+
datetime(2008, 1, 1): datetime(2008, 1, 1),
|
| 361 |
+
datetime(2008, 1, 16): datetime(2008, 2, 1),
|
| 362 |
+
datetime(2008, 1, 15): datetime(2008, 1, 15),
|
| 363 |
+
datetime(2008, 1, 31): datetime(2008, 2, 1),
|
| 364 |
+
datetime(2006, 12, 29): datetime(2007, 1, 1),
|
| 365 |
+
datetime(2006, 12, 2): datetime(2006, 12, 15),
|
| 366 |
+
datetime(2007, 1, 1): datetime(2007, 1, 1),
|
| 367 |
+
},
|
| 368 |
+
),
|
| 369 |
+
(
|
| 370 |
+
SemiMonthBegin(0, day_of_month=16),
|
| 371 |
+
{
|
| 372 |
+
datetime(2008, 1, 1): datetime(2008, 1, 1),
|
| 373 |
+
datetime(2008, 1, 16): datetime(2008, 1, 16),
|
| 374 |
+
datetime(2008, 1, 15): datetime(2008, 1, 16),
|
| 375 |
+
datetime(2008, 1, 31): datetime(2008, 2, 1),
|
| 376 |
+
datetime(2006, 12, 29): datetime(2007, 1, 1),
|
| 377 |
+
datetime(2006, 12, 31): datetime(2007, 1, 1),
|
| 378 |
+
datetime(2007, 1, 5): datetime(2007, 1, 16),
|
| 379 |
+
datetime(2007, 1, 1): datetime(2007, 1, 1),
|
| 380 |
+
},
|
| 381 |
+
),
|
| 382 |
+
(
|
| 383 |
+
SemiMonthBegin(2),
|
| 384 |
+
{
|
| 385 |
+
datetime(2008, 1, 1): datetime(2008, 2, 1),
|
| 386 |
+
datetime(2008, 1, 31): datetime(2008, 2, 15),
|
| 387 |
+
datetime(2006, 12, 1): datetime(2007, 1, 1),
|
| 388 |
+
datetime(2006, 12, 29): datetime(2007, 1, 15),
|
| 389 |
+
datetime(2006, 12, 15): datetime(2007, 1, 15),
|
| 390 |
+
datetime(2007, 1, 1): datetime(2007, 2, 1),
|
| 391 |
+
datetime(2007, 1, 16): datetime(2007, 2, 15),
|
| 392 |
+
datetime(2006, 11, 1): datetime(2006, 12, 1),
|
| 393 |
+
},
|
| 394 |
+
),
|
| 395 |
+
(
|
| 396 |
+
SemiMonthBegin(-1),
|
| 397 |
+
{
|
| 398 |
+
datetime(2007, 1, 1): datetime(2006, 12, 15),
|
| 399 |
+
datetime(2008, 6, 30): datetime(2008, 6, 15),
|
| 400 |
+
datetime(2008, 6, 14): datetime(2008, 6, 1),
|
| 401 |
+
datetime(2008, 12, 31): datetime(2008, 12, 15),
|
| 402 |
+
datetime(2006, 12, 29): datetime(2006, 12, 15),
|
| 403 |
+
datetime(2006, 12, 15): datetime(2006, 12, 1),
|
| 404 |
+
datetime(2007, 1, 1): datetime(2006, 12, 15),
|
| 405 |
+
},
|
| 406 |
+
),
|
| 407 |
+
(
|
| 408 |
+
SemiMonthBegin(-1, day_of_month=4),
|
| 409 |
+
{
|
| 410 |
+
datetime(2007, 1, 1): datetime(2006, 12, 4),
|
| 411 |
+
datetime(2007, 1, 4): datetime(2007, 1, 1),
|
| 412 |
+
datetime(2008, 6, 30): datetime(2008, 6, 4),
|
| 413 |
+
datetime(2008, 12, 31): datetime(2008, 12, 4),
|
| 414 |
+
datetime(2006, 12, 5): datetime(2006, 12, 4),
|
| 415 |
+
datetime(2006, 12, 30): datetime(2006, 12, 4),
|
| 416 |
+
datetime(2006, 12, 2): datetime(2006, 12, 1),
|
| 417 |
+
datetime(2007, 1, 1): datetime(2006, 12, 4),
|
| 418 |
+
},
|
| 419 |
+
),
|
| 420 |
+
(
|
| 421 |
+
SemiMonthBegin(-2),
|
| 422 |
+
{
|
| 423 |
+
datetime(2007, 1, 1): datetime(2006, 12, 1),
|
| 424 |
+
datetime(2008, 6, 30): datetime(2008, 6, 1),
|
| 425 |
+
datetime(2008, 6, 14): datetime(2008, 5, 15),
|
| 426 |
+
datetime(2008, 12, 31): datetime(2008, 12, 1),
|
| 427 |
+
datetime(2006, 12, 29): datetime(2006, 12, 1),
|
| 428 |
+
datetime(2006, 12, 15): datetime(2006, 11, 15),
|
| 429 |
+
datetime(2007, 1, 1): datetime(2006, 12, 1),
|
| 430 |
+
},
|
| 431 |
+
),
|
| 432 |
+
]
|
| 433 |
+
|
| 434 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 435 |
+
def test_offset(self, case):
|
| 436 |
+
offset, cases = case
|
| 437 |
+
for base, expected in cases.items():
|
| 438 |
+
assert_offset_equal(offset, base, expected)
|
| 439 |
+
|
| 440 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 441 |
+
def test_apply_index(self, case):
|
| 442 |
+
offset, cases = case
|
| 443 |
+
shift = DatetimeIndex(cases.keys())
|
| 444 |
+
|
| 445 |
+
with tm.assert_produces_warning(None):
|
| 446 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 447 |
+
# an integer array to PeriodIndex
|
| 448 |
+
result = offset + shift
|
| 449 |
+
|
| 450 |
+
exp = DatetimeIndex(cases.values())
|
| 451 |
+
tm.assert_index_equal(result, exp)
|
| 452 |
+
|
| 453 |
+
on_offset_cases = [
|
| 454 |
+
(datetime(2007, 12, 1), True),
|
| 455 |
+
(datetime(2007, 12, 15), True),
|
| 456 |
+
(datetime(2007, 12, 14), False),
|
| 457 |
+
(datetime(2007, 12, 31), False),
|
| 458 |
+
(datetime(2008, 2, 15), True),
|
| 459 |
+
]
|
| 460 |
+
|
| 461 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 462 |
+
def test_is_on_offset(self, case):
|
| 463 |
+
dt, expected = case
|
| 464 |
+
assert_is_on_offset(SemiMonthBegin(), dt, expected)
|
| 465 |
+
|
| 466 |
+
@pytest.mark.parametrize("klass", [Series, DatetimeIndex])
|
| 467 |
+
def test_vectorized_offset_addition(self, klass):
|
| 468 |
+
shift = klass(
|
| 469 |
+
[
|
| 470 |
+
Timestamp("2000-01-15 00:15:00", tz="US/Central"),
|
| 471 |
+
Timestamp("2000-02-15", tz="US/Central"),
|
| 472 |
+
],
|
| 473 |
+
name="a",
|
| 474 |
+
)
|
| 475 |
+
with tm.assert_produces_warning(None):
|
| 476 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 477 |
+
# an integer array to PeriodIndex
|
| 478 |
+
result = shift + SemiMonthBegin()
|
| 479 |
+
result2 = SemiMonthBegin() + shift
|
| 480 |
+
|
| 481 |
+
exp = klass(
|
| 482 |
+
[
|
| 483 |
+
Timestamp("2000-02-01 00:15:00", tz="US/Central"),
|
| 484 |
+
Timestamp("2000-03-01", tz="US/Central"),
|
| 485 |
+
],
|
| 486 |
+
name="a",
|
| 487 |
+
)
|
| 488 |
+
tm.assert_equal(result, exp)
|
| 489 |
+
tm.assert_equal(result2, exp)
|
| 490 |
+
|
| 491 |
+
shift = klass(
|
| 492 |
+
[
|
| 493 |
+
Timestamp("2000-01-01 00:15:00", tz="US/Central"),
|
| 494 |
+
Timestamp("2000-02-01", tz="US/Central"),
|
| 495 |
+
],
|
| 496 |
+
name="a",
|
| 497 |
+
)
|
| 498 |
+
with tm.assert_produces_warning(None):
|
| 499 |
+
# GH#22535 check that we don't get a FutureWarning from adding
|
| 500 |
+
# an integer array to PeriodIndex
|
| 501 |
+
result = shift + SemiMonthBegin()
|
| 502 |
+
result2 = SemiMonthBegin() + shift
|
| 503 |
+
|
| 504 |
+
exp = klass(
|
| 505 |
+
[
|
| 506 |
+
Timestamp("2000-01-15 00:15:00", tz="US/Central"),
|
| 507 |
+
Timestamp("2000-02-15", tz="US/Central"),
|
| 508 |
+
],
|
| 509 |
+
name="a",
|
| 510 |
+
)
|
| 511 |
+
tm.assert_equal(result, exp)
|
| 512 |
+
tm.assert_equal(result2, exp)
|
| 513 |
+
|
| 514 |
+
|
| 515 |
+
class TestMonthBegin:
|
| 516 |
+
offset_cases = []
|
| 517 |
+
# NOTE: I'm not entirely happy with the logic here for Begin -ss
|
| 518 |
+
# see thread 'offset conventions' on the ML
|
| 519 |
+
offset_cases.append(
|
| 520 |
+
(
|
| 521 |
+
MonthBegin(),
|
| 522 |
+
{
|
| 523 |
+
datetime(2008, 1, 31): datetime(2008, 2, 1),
|
| 524 |
+
datetime(2008, 2, 1): datetime(2008, 3, 1),
|
| 525 |
+
datetime(2006, 12, 31): datetime(2007, 1, 1),
|
| 526 |
+
datetime(2006, 12, 1): datetime(2007, 1, 1),
|
| 527 |
+
datetime(2007, 1, 31): datetime(2007, 2, 1),
|
| 528 |
+
},
|
| 529 |
+
)
|
| 530 |
+
)
|
| 531 |
+
|
| 532 |
+
offset_cases.append(
|
| 533 |
+
(
|
| 534 |
+
MonthBegin(0),
|
| 535 |
+
{
|
| 536 |
+
datetime(2008, 1, 31): datetime(2008, 2, 1),
|
| 537 |
+
datetime(2008, 1, 1): datetime(2008, 1, 1),
|
| 538 |
+
datetime(2006, 12, 3): datetime(2007, 1, 1),
|
| 539 |
+
datetime(2007, 1, 31): datetime(2007, 2, 1),
|
| 540 |
+
},
|
| 541 |
+
)
|
| 542 |
+
)
|
| 543 |
+
|
| 544 |
+
offset_cases.append(
|
| 545 |
+
(
|
| 546 |
+
MonthBegin(2),
|
| 547 |
+
{
|
| 548 |
+
datetime(2008, 2, 29): datetime(2008, 4, 1),
|
| 549 |
+
datetime(2008, 1, 31): datetime(2008, 3, 1),
|
| 550 |
+
datetime(2006, 12, 31): datetime(2007, 2, 1),
|
| 551 |
+
datetime(2007, 12, 28): datetime(2008, 2, 1),
|
| 552 |
+
datetime(2007, 1, 1): datetime(2007, 3, 1),
|
| 553 |
+
datetime(2006, 11, 1): datetime(2007, 1, 1),
|
| 554 |
+
},
|
| 555 |
+
)
|
| 556 |
+
)
|
| 557 |
+
|
| 558 |
+
offset_cases.append(
|
| 559 |
+
(
|
| 560 |
+
MonthBegin(-1),
|
| 561 |
+
{
|
| 562 |
+
datetime(2007, 1, 1): datetime(2006, 12, 1),
|
| 563 |
+
datetime(2008, 5, 31): datetime(2008, 5, 1),
|
| 564 |
+
datetime(2008, 12, 31): datetime(2008, 12, 1),
|
| 565 |
+
datetime(2006, 12, 29): datetime(2006, 12, 1),
|
| 566 |
+
datetime(2006, 1, 2): datetime(2006, 1, 1),
|
| 567 |
+
},
|
| 568 |
+
)
|
| 569 |
+
)
|
| 570 |
+
|
| 571 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 572 |
+
def test_offset(self, case):
|
| 573 |
+
offset, cases = case
|
| 574 |
+
for base, expected in cases.items():
|
| 575 |
+
assert_offset_equal(offset, base, expected)
|
| 576 |
+
|
| 577 |
+
|
| 578 |
+
class TestMonthEnd:
|
| 579 |
+
def test_day_of_month(self):
|
| 580 |
+
dt = datetime(2007, 1, 1)
|
| 581 |
+
offset = MonthEnd()
|
| 582 |
+
|
| 583 |
+
result = dt + offset
|
| 584 |
+
assert result == Timestamp(2007, 1, 31)
|
| 585 |
+
|
| 586 |
+
result = result + offset
|
| 587 |
+
assert result == Timestamp(2007, 2, 28)
|
| 588 |
+
|
| 589 |
+
def test_normalize(self):
|
| 590 |
+
dt = datetime(2007, 1, 1, 3)
|
| 591 |
+
|
| 592 |
+
result = dt + MonthEnd(normalize=True)
|
| 593 |
+
expected = dt.replace(hour=0) + MonthEnd()
|
| 594 |
+
assert result == expected
|
| 595 |
+
|
| 596 |
+
offset_cases = []
|
| 597 |
+
offset_cases.append(
|
| 598 |
+
(
|
| 599 |
+
MonthEnd(),
|
| 600 |
+
{
|
| 601 |
+
datetime(2008, 1, 1): datetime(2008, 1, 31),
|
| 602 |
+
datetime(2008, 1, 31): datetime(2008, 2, 29),
|
| 603 |
+
datetime(2006, 12, 29): datetime(2006, 12, 31),
|
| 604 |
+
datetime(2006, 12, 31): datetime(2007, 1, 31),
|
| 605 |
+
datetime(2007, 1, 1): datetime(2007, 1, 31),
|
| 606 |
+
datetime(2006, 12, 1): datetime(2006, 12, 31),
|
| 607 |
+
},
|
| 608 |
+
)
|
| 609 |
+
)
|
| 610 |
+
|
| 611 |
+
offset_cases.append(
|
| 612 |
+
(
|
| 613 |
+
MonthEnd(0),
|
| 614 |
+
{
|
| 615 |
+
datetime(2008, 1, 1): datetime(2008, 1, 31),
|
| 616 |
+
datetime(2008, 1, 31): datetime(2008, 1, 31),
|
| 617 |
+
datetime(2006, 12, 29): datetime(2006, 12, 31),
|
| 618 |
+
datetime(2006, 12, 31): datetime(2006, 12, 31),
|
| 619 |
+
datetime(2007, 1, 1): datetime(2007, 1, 31),
|
| 620 |
+
},
|
| 621 |
+
)
|
| 622 |
+
)
|
| 623 |
+
|
| 624 |
+
offset_cases.append(
|
| 625 |
+
(
|
| 626 |
+
MonthEnd(2),
|
| 627 |
+
{
|
| 628 |
+
datetime(2008, 1, 1): datetime(2008, 2, 29),
|
| 629 |
+
datetime(2008, 1, 31): datetime(2008, 3, 31),
|
| 630 |
+
datetime(2006, 12, 29): datetime(2007, 1, 31),
|
| 631 |
+
datetime(2006, 12, 31): datetime(2007, 2, 28),
|
| 632 |
+
datetime(2007, 1, 1): datetime(2007, 2, 28),
|
| 633 |
+
datetime(2006, 11, 1): datetime(2006, 12, 31),
|
| 634 |
+
},
|
| 635 |
+
)
|
| 636 |
+
)
|
| 637 |
+
|
| 638 |
+
offset_cases.append(
|
| 639 |
+
(
|
| 640 |
+
MonthEnd(-1),
|
| 641 |
+
{
|
| 642 |
+
datetime(2007, 1, 1): datetime(2006, 12, 31),
|
| 643 |
+
datetime(2008, 6, 30): datetime(2008, 5, 31),
|
| 644 |
+
datetime(2008, 12, 31): datetime(2008, 11, 30),
|
| 645 |
+
datetime(2006, 12, 29): datetime(2006, 11, 30),
|
| 646 |
+
datetime(2006, 12, 30): datetime(2006, 11, 30),
|
| 647 |
+
datetime(2007, 1, 1): datetime(2006, 12, 31),
|
| 648 |
+
},
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
|
| 652 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 653 |
+
def test_offset(self, case):
|
| 654 |
+
offset, cases = case
|
| 655 |
+
for base, expected in cases.items():
|
| 656 |
+
assert_offset_equal(offset, base, expected)
|
| 657 |
+
|
| 658 |
+
on_offset_cases = [
|
| 659 |
+
(MonthEnd(), datetime(2007, 12, 31), True),
|
| 660 |
+
(MonthEnd(), datetime(2008, 1, 1), False),
|
| 661 |
+
]
|
| 662 |
+
|
| 663 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 664 |
+
def test_is_on_offset(self, case):
|
| 665 |
+
offset, dt, expected = case
|
| 666 |
+
assert_is_on_offset(offset, dt, expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_offsets.py
ADDED
|
@@ -0,0 +1,1185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests of pandas.tseries.offsets
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
from datetime import (
|
| 7 |
+
datetime,
|
| 8 |
+
timedelta,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
import numpy as np
|
| 12 |
+
import pytest
|
| 13 |
+
|
| 14 |
+
from pandas._libs.tslibs import (
|
| 15 |
+
NaT,
|
| 16 |
+
Timedelta,
|
| 17 |
+
Timestamp,
|
| 18 |
+
conversion,
|
| 19 |
+
timezones,
|
| 20 |
+
)
|
| 21 |
+
import pandas._libs.tslibs.offsets as liboffsets
|
| 22 |
+
from pandas._libs.tslibs.offsets import (
|
| 23 |
+
_get_offset,
|
| 24 |
+
_offset_map,
|
| 25 |
+
to_offset,
|
| 26 |
+
)
|
| 27 |
+
from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
|
| 28 |
+
from pandas.errors import PerformanceWarning
|
| 29 |
+
|
| 30 |
+
from pandas import (
|
| 31 |
+
DataFrame,
|
| 32 |
+
DatetimeIndex,
|
| 33 |
+
Series,
|
| 34 |
+
date_range,
|
| 35 |
+
)
|
| 36 |
+
import pandas._testing as tm
|
| 37 |
+
from pandas.tests.tseries.offsets.common import WeekDay
|
| 38 |
+
|
| 39 |
+
from pandas.tseries import offsets
|
| 40 |
+
from pandas.tseries.offsets import (
|
| 41 |
+
FY5253,
|
| 42 |
+
BDay,
|
| 43 |
+
BMonthEnd,
|
| 44 |
+
BusinessHour,
|
| 45 |
+
CustomBusinessDay,
|
| 46 |
+
CustomBusinessHour,
|
| 47 |
+
CustomBusinessMonthBegin,
|
| 48 |
+
CustomBusinessMonthEnd,
|
| 49 |
+
DateOffset,
|
| 50 |
+
Easter,
|
| 51 |
+
FY5253Quarter,
|
| 52 |
+
LastWeekOfMonth,
|
| 53 |
+
MonthBegin,
|
| 54 |
+
Nano,
|
| 55 |
+
Tick,
|
| 56 |
+
Week,
|
| 57 |
+
WeekOfMonth,
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
_ARITHMETIC_DATE_OFFSET = [
|
| 61 |
+
"years",
|
| 62 |
+
"months",
|
| 63 |
+
"weeks",
|
| 64 |
+
"days",
|
| 65 |
+
"hours",
|
| 66 |
+
"minutes",
|
| 67 |
+
"seconds",
|
| 68 |
+
"milliseconds",
|
| 69 |
+
"microseconds",
|
| 70 |
+
]
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def _create_offset(klass, value=1, normalize=False):
|
| 74 |
+
# create instance from offset class
|
| 75 |
+
if klass is FY5253:
|
| 76 |
+
klass = klass(
|
| 77 |
+
n=value,
|
| 78 |
+
startingMonth=1,
|
| 79 |
+
weekday=1,
|
| 80 |
+
variation="last",
|
| 81 |
+
normalize=normalize,
|
| 82 |
+
)
|
| 83 |
+
elif klass is FY5253Quarter:
|
| 84 |
+
klass = klass(
|
| 85 |
+
n=value,
|
| 86 |
+
startingMonth=1,
|
| 87 |
+
weekday=1,
|
| 88 |
+
qtr_with_extra_week=1,
|
| 89 |
+
variation="last",
|
| 90 |
+
normalize=normalize,
|
| 91 |
+
)
|
| 92 |
+
elif klass is LastWeekOfMonth:
|
| 93 |
+
klass = klass(n=value, weekday=5, normalize=normalize)
|
| 94 |
+
elif klass is WeekOfMonth:
|
| 95 |
+
klass = klass(n=value, week=1, weekday=5, normalize=normalize)
|
| 96 |
+
elif klass is Week:
|
| 97 |
+
klass = klass(n=value, weekday=5, normalize=normalize)
|
| 98 |
+
elif klass is DateOffset:
|
| 99 |
+
klass = klass(days=value, normalize=normalize)
|
| 100 |
+
else:
|
| 101 |
+
klass = klass(value, normalize=normalize)
|
| 102 |
+
return klass
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
@pytest.fixture(
|
| 106 |
+
params=[
|
| 107 |
+
getattr(offsets, o)
|
| 108 |
+
for o in offsets.__all__
|
| 109 |
+
if issubclass(getattr(offsets, o), liboffsets.MonthOffset)
|
| 110 |
+
and o != "MonthOffset"
|
| 111 |
+
]
|
| 112 |
+
)
|
| 113 |
+
def month_classes(request):
|
| 114 |
+
"""
|
| 115 |
+
Fixture for month based datetime offsets available for a time series.
|
| 116 |
+
"""
|
| 117 |
+
return request.param
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@pytest.fixture(
|
| 121 |
+
params=[
|
| 122 |
+
getattr(offsets, o) for o in offsets.__all__ if o not in ("Tick", "BaseOffset")
|
| 123 |
+
]
|
| 124 |
+
)
|
| 125 |
+
def offset_types(request):
|
| 126 |
+
"""
|
| 127 |
+
Fixture for all the datetime offsets available for a time series.
|
| 128 |
+
"""
|
| 129 |
+
return request.param
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
@pytest.fixture
|
| 133 |
+
def dt():
|
| 134 |
+
return Timestamp(datetime(2008, 1, 2))
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
@pytest.fixture
|
| 138 |
+
def expecteds():
|
| 139 |
+
# executed value created by _create_offset
|
| 140 |
+
# are applied to 2011/01/01 09:00 (Saturday)
|
| 141 |
+
# used for .apply and .rollforward
|
| 142 |
+
return {
|
| 143 |
+
"Day": Timestamp("2011-01-02 09:00:00"),
|
| 144 |
+
"DateOffset": Timestamp("2011-01-02 09:00:00"),
|
| 145 |
+
"BusinessDay": Timestamp("2011-01-03 09:00:00"),
|
| 146 |
+
"CustomBusinessDay": Timestamp("2011-01-03 09:00:00"),
|
| 147 |
+
"CustomBusinessMonthEnd": Timestamp("2011-01-31 09:00:00"),
|
| 148 |
+
"CustomBusinessMonthBegin": Timestamp("2011-01-03 09:00:00"),
|
| 149 |
+
"MonthBegin": Timestamp("2011-02-01 09:00:00"),
|
| 150 |
+
"BusinessMonthBegin": Timestamp("2011-01-03 09:00:00"),
|
| 151 |
+
"MonthEnd": Timestamp("2011-01-31 09:00:00"),
|
| 152 |
+
"SemiMonthEnd": Timestamp("2011-01-15 09:00:00"),
|
| 153 |
+
"SemiMonthBegin": Timestamp("2011-01-15 09:00:00"),
|
| 154 |
+
"BusinessMonthEnd": Timestamp("2011-01-31 09:00:00"),
|
| 155 |
+
"YearBegin": Timestamp("2012-01-01 09:00:00"),
|
| 156 |
+
"BYearBegin": Timestamp("2011-01-03 09:00:00"),
|
| 157 |
+
"YearEnd": Timestamp("2011-12-31 09:00:00"),
|
| 158 |
+
"BYearEnd": Timestamp("2011-12-30 09:00:00"),
|
| 159 |
+
"QuarterBegin": Timestamp("2011-03-01 09:00:00"),
|
| 160 |
+
"BQuarterBegin": Timestamp("2011-03-01 09:00:00"),
|
| 161 |
+
"QuarterEnd": Timestamp("2011-03-31 09:00:00"),
|
| 162 |
+
"BQuarterEnd": Timestamp("2011-03-31 09:00:00"),
|
| 163 |
+
"BusinessHour": Timestamp("2011-01-03 10:00:00"),
|
| 164 |
+
"CustomBusinessHour": Timestamp("2011-01-03 10:00:00"),
|
| 165 |
+
"WeekOfMonth": Timestamp("2011-01-08 09:00:00"),
|
| 166 |
+
"LastWeekOfMonth": Timestamp("2011-01-29 09:00:00"),
|
| 167 |
+
"FY5253Quarter": Timestamp("2011-01-25 09:00:00"),
|
| 168 |
+
"FY5253": Timestamp("2011-01-25 09:00:00"),
|
| 169 |
+
"Week": Timestamp("2011-01-08 09:00:00"),
|
| 170 |
+
"Easter": Timestamp("2011-04-24 09:00:00"),
|
| 171 |
+
"Hour": Timestamp("2011-01-01 10:00:00"),
|
| 172 |
+
"Minute": Timestamp("2011-01-01 09:01:00"),
|
| 173 |
+
"Second": Timestamp("2011-01-01 09:00:01"),
|
| 174 |
+
"Milli": Timestamp("2011-01-01 09:00:00.001000"),
|
| 175 |
+
"Micro": Timestamp("2011-01-01 09:00:00.000001"),
|
| 176 |
+
"Nano": Timestamp("2011-01-01T09:00:00.000000001"),
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class TestCommon:
|
| 181 |
+
def test_immutable(self, offset_types):
|
| 182 |
+
# GH#21341 check that __setattr__ raises
|
| 183 |
+
offset = _create_offset(offset_types)
|
| 184 |
+
msg = "objects is not writable|DateOffset objects are immutable"
|
| 185 |
+
with pytest.raises(AttributeError, match=msg):
|
| 186 |
+
offset.normalize = True
|
| 187 |
+
with pytest.raises(AttributeError, match=msg):
|
| 188 |
+
offset.n = 91
|
| 189 |
+
|
| 190 |
+
def test_return_type(self, offset_types):
|
| 191 |
+
offset = _create_offset(offset_types)
|
| 192 |
+
|
| 193 |
+
# make sure that we are returning a Timestamp
|
| 194 |
+
result = Timestamp("20080101") + offset
|
| 195 |
+
assert isinstance(result, Timestamp)
|
| 196 |
+
|
| 197 |
+
# make sure that we are returning NaT
|
| 198 |
+
assert NaT + offset is NaT
|
| 199 |
+
assert offset + NaT is NaT
|
| 200 |
+
|
| 201 |
+
assert NaT - offset is NaT
|
| 202 |
+
assert (-offset)._apply(NaT) is NaT
|
| 203 |
+
|
| 204 |
+
def test_offset_n(self, offset_types):
|
| 205 |
+
offset = _create_offset(offset_types)
|
| 206 |
+
assert offset.n == 1
|
| 207 |
+
|
| 208 |
+
neg_offset = offset * -1
|
| 209 |
+
assert neg_offset.n == -1
|
| 210 |
+
|
| 211 |
+
mul_offset = offset * 3
|
| 212 |
+
assert mul_offset.n == 3
|
| 213 |
+
|
| 214 |
+
def test_offset_timedelta64_arg(self, offset_types):
|
| 215 |
+
# check that offset._validate_n raises TypeError on a timedelt64
|
| 216 |
+
# object
|
| 217 |
+
off = _create_offset(offset_types)
|
| 218 |
+
|
| 219 |
+
td64 = np.timedelta64(4567, "s")
|
| 220 |
+
with pytest.raises(TypeError, match="argument must be an integer"):
|
| 221 |
+
type(off)(n=td64, **off.kwds)
|
| 222 |
+
|
| 223 |
+
def test_offset_mul_ndarray(self, offset_types):
|
| 224 |
+
off = _create_offset(offset_types)
|
| 225 |
+
|
| 226 |
+
expected = np.array([[off, off * 2], [off * 3, off * 4]])
|
| 227 |
+
|
| 228 |
+
result = np.array([[1, 2], [3, 4]]) * off
|
| 229 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 230 |
+
|
| 231 |
+
result = off * np.array([[1, 2], [3, 4]])
|
| 232 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 233 |
+
|
| 234 |
+
def test_offset_freqstr(self, offset_types):
|
| 235 |
+
offset = _create_offset(offset_types)
|
| 236 |
+
|
| 237 |
+
freqstr = offset.freqstr
|
| 238 |
+
if freqstr not in ("<Easter>", "<DateOffset: days=1>", "LWOM-SAT"):
|
| 239 |
+
code = _get_offset(freqstr)
|
| 240 |
+
assert offset.rule_code == code
|
| 241 |
+
|
| 242 |
+
def _check_offsetfunc_works(self, offset, funcname, dt, expected, normalize=False):
|
| 243 |
+
if normalize and issubclass(offset, Tick):
|
| 244 |
+
# normalize=True disallowed for Tick subclasses GH#21427
|
| 245 |
+
return
|
| 246 |
+
|
| 247 |
+
offset_s = _create_offset(offset, normalize=normalize)
|
| 248 |
+
func = getattr(offset_s, funcname)
|
| 249 |
+
|
| 250 |
+
result = func(dt)
|
| 251 |
+
assert isinstance(result, Timestamp)
|
| 252 |
+
assert result == expected
|
| 253 |
+
|
| 254 |
+
result = func(Timestamp(dt))
|
| 255 |
+
assert isinstance(result, Timestamp)
|
| 256 |
+
assert result == expected
|
| 257 |
+
|
| 258 |
+
# see gh-14101
|
| 259 |
+
ts = Timestamp(dt) + Nano(5)
|
| 260 |
+
# test nanosecond is preserved
|
| 261 |
+
with tm.assert_produces_warning(None):
|
| 262 |
+
result = func(ts)
|
| 263 |
+
|
| 264 |
+
assert isinstance(result, Timestamp)
|
| 265 |
+
if normalize is False:
|
| 266 |
+
assert result == expected + Nano(5)
|
| 267 |
+
else:
|
| 268 |
+
assert result == expected
|
| 269 |
+
|
| 270 |
+
if isinstance(dt, np.datetime64):
|
| 271 |
+
# test tz when input is datetime or Timestamp
|
| 272 |
+
return
|
| 273 |
+
|
| 274 |
+
for tz in [
|
| 275 |
+
None,
|
| 276 |
+
"UTC",
|
| 277 |
+
"Asia/Tokyo",
|
| 278 |
+
"US/Eastern",
|
| 279 |
+
"dateutil/Asia/Tokyo",
|
| 280 |
+
"dateutil/US/Pacific",
|
| 281 |
+
]:
|
| 282 |
+
expected_localize = expected.tz_localize(tz)
|
| 283 |
+
tz_obj = timezones.maybe_get_tz(tz)
|
| 284 |
+
dt_tz = conversion.localize_pydatetime(dt, tz_obj)
|
| 285 |
+
|
| 286 |
+
result = func(dt_tz)
|
| 287 |
+
assert isinstance(result, Timestamp)
|
| 288 |
+
assert result == expected_localize
|
| 289 |
+
|
| 290 |
+
result = func(Timestamp(dt, tz=tz))
|
| 291 |
+
assert isinstance(result, Timestamp)
|
| 292 |
+
assert result == expected_localize
|
| 293 |
+
|
| 294 |
+
# see gh-14101
|
| 295 |
+
ts = Timestamp(dt, tz=tz) + Nano(5)
|
| 296 |
+
# test nanosecond is preserved
|
| 297 |
+
with tm.assert_produces_warning(None):
|
| 298 |
+
result = func(ts)
|
| 299 |
+
assert isinstance(result, Timestamp)
|
| 300 |
+
if normalize is False:
|
| 301 |
+
assert result == expected_localize + Nano(5)
|
| 302 |
+
else:
|
| 303 |
+
assert result == expected_localize
|
| 304 |
+
|
| 305 |
+
def test_apply(self, offset_types, expecteds):
|
| 306 |
+
sdt = datetime(2011, 1, 1, 9, 0)
|
| 307 |
+
ndt = np.datetime64("2011-01-01 09:00")
|
| 308 |
+
|
| 309 |
+
expected = expecteds[offset_types.__name__]
|
| 310 |
+
expected_norm = Timestamp(expected.date())
|
| 311 |
+
|
| 312 |
+
for dt in [sdt, ndt]:
|
| 313 |
+
self._check_offsetfunc_works(offset_types, "_apply", dt, expected)
|
| 314 |
+
|
| 315 |
+
self._check_offsetfunc_works(
|
| 316 |
+
offset_types, "_apply", dt, expected_norm, normalize=True
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
def test_rollforward(self, offset_types, expecteds):
|
| 320 |
+
expecteds = expecteds.copy()
|
| 321 |
+
|
| 322 |
+
# result will not be changed if the target is on the offset
|
| 323 |
+
no_changes = [
|
| 324 |
+
"Day",
|
| 325 |
+
"MonthBegin",
|
| 326 |
+
"SemiMonthBegin",
|
| 327 |
+
"YearBegin",
|
| 328 |
+
"Week",
|
| 329 |
+
"Hour",
|
| 330 |
+
"Minute",
|
| 331 |
+
"Second",
|
| 332 |
+
"Milli",
|
| 333 |
+
"Micro",
|
| 334 |
+
"Nano",
|
| 335 |
+
"DateOffset",
|
| 336 |
+
]
|
| 337 |
+
for n in no_changes:
|
| 338 |
+
expecteds[n] = Timestamp("2011/01/01 09:00")
|
| 339 |
+
|
| 340 |
+
expecteds["BusinessHour"] = Timestamp("2011-01-03 09:00:00")
|
| 341 |
+
expecteds["CustomBusinessHour"] = Timestamp("2011-01-03 09:00:00")
|
| 342 |
+
|
| 343 |
+
# but be changed when normalize=True
|
| 344 |
+
norm_expected = expecteds.copy()
|
| 345 |
+
for k in norm_expected:
|
| 346 |
+
norm_expected[k] = Timestamp(norm_expected[k].date())
|
| 347 |
+
|
| 348 |
+
normalized = {
|
| 349 |
+
"Day": Timestamp("2011-01-02 00:00:00"),
|
| 350 |
+
"DateOffset": Timestamp("2011-01-02 00:00:00"),
|
| 351 |
+
"MonthBegin": Timestamp("2011-02-01 00:00:00"),
|
| 352 |
+
"SemiMonthBegin": Timestamp("2011-01-15 00:00:00"),
|
| 353 |
+
"YearBegin": Timestamp("2012-01-01 00:00:00"),
|
| 354 |
+
"Week": Timestamp("2011-01-08 00:00:00"),
|
| 355 |
+
"Hour": Timestamp("2011-01-01 00:00:00"),
|
| 356 |
+
"Minute": Timestamp("2011-01-01 00:00:00"),
|
| 357 |
+
"Second": Timestamp("2011-01-01 00:00:00"),
|
| 358 |
+
"Milli": Timestamp("2011-01-01 00:00:00"),
|
| 359 |
+
"Micro": Timestamp("2011-01-01 00:00:00"),
|
| 360 |
+
}
|
| 361 |
+
norm_expected.update(normalized)
|
| 362 |
+
|
| 363 |
+
sdt = datetime(2011, 1, 1, 9, 0)
|
| 364 |
+
ndt = np.datetime64("2011-01-01 09:00")
|
| 365 |
+
|
| 366 |
+
for dt in [sdt, ndt]:
|
| 367 |
+
expected = expecteds[offset_types.__name__]
|
| 368 |
+
self._check_offsetfunc_works(offset_types, "rollforward", dt, expected)
|
| 369 |
+
expected = norm_expected[offset_types.__name__]
|
| 370 |
+
self._check_offsetfunc_works(
|
| 371 |
+
offset_types, "rollforward", dt, expected, normalize=True
|
| 372 |
+
)
|
| 373 |
+
|
| 374 |
+
def test_rollback(self, offset_types):
|
| 375 |
+
expecteds = {
|
| 376 |
+
"BusinessDay": Timestamp("2010-12-31 09:00:00"),
|
| 377 |
+
"CustomBusinessDay": Timestamp("2010-12-31 09:00:00"),
|
| 378 |
+
"CustomBusinessMonthEnd": Timestamp("2010-12-31 09:00:00"),
|
| 379 |
+
"CustomBusinessMonthBegin": Timestamp("2010-12-01 09:00:00"),
|
| 380 |
+
"BusinessMonthBegin": Timestamp("2010-12-01 09:00:00"),
|
| 381 |
+
"MonthEnd": Timestamp("2010-12-31 09:00:00"),
|
| 382 |
+
"SemiMonthEnd": Timestamp("2010-12-31 09:00:00"),
|
| 383 |
+
"BusinessMonthEnd": Timestamp("2010-12-31 09:00:00"),
|
| 384 |
+
"BYearBegin": Timestamp("2010-01-01 09:00:00"),
|
| 385 |
+
"YearEnd": Timestamp("2010-12-31 09:00:00"),
|
| 386 |
+
"BYearEnd": Timestamp("2010-12-31 09:00:00"),
|
| 387 |
+
"QuarterBegin": Timestamp("2010-12-01 09:00:00"),
|
| 388 |
+
"BQuarterBegin": Timestamp("2010-12-01 09:00:00"),
|
| 389 |
+
"QuarterEnd": Timestamp("2010-12-31 09:00:00"),
|
| 390 |
+
"BQuarterEnd": Timestamp("2010-12-31 09:00:00"),
|
| 391 |
+
"BusinessHour": Timestamp("2010-12-31 17:00:00"),
|
| 392 |
+
"CustomBusinessHour": Timestamp("2010-12-31 17:00:00"),
|
| 393 |
+
"WeekOfMonth": Timestamp("2010-12-11 09:00:00"),
|
| 394 |
+
"LastWeekOfMonth": Timestamp("2010-12-25 09:00:00"),
|
| 395 |
+
"FY5253Quarter": Timestamp("2010-10-26 09:00:00"),
|
| 396 |
+
"FY5253": Timestamp("2010-01-26 09:00:00"),
|
| 397 |
+
"Easter": Timestamp("2010-04-04 09:00:00"),
|
| 398 |
+
}
|
| 399 |
+
|
| 400 |
+
# result will not be changed if the target is on the offset
|
| 401 |
+
for n in [
|
| 402 |
+
"Day",
|
| 403 |
+
"MonthBegin",
|
| 404 |
+
"SemiMonthBegin",
|
| 405 |
+
"YearBegin",
|
| 406 |
+
"Week",
|
| 407 |
+
"Hour",
|
| 408 |
+
"Minute",
|
| 409 |
+
"Second",
|
| 410 |
+
"Milli",
|
| 411 |
+
"Micro",
|
| 412 |
+
"Nano",
|
| 413 |
+
"DateOffset",
|
| 414 |
+
]:
|
| 415 |
+
expecteds[n] = Timestamp("2011/01/01 09:00")
|
| 416 |
+
|
| 417 |
+
# but be changed when normalize=True
|
| 418 |
+
norm_expected = expecteds.copy()
|
| 419 |
+
for k in norm_expected:
|
| 420 |
+
norm_expected[k] = Timestamp(norm_expected[k].date())
|
| 421 |
+
|
| 422 |
+
normalized = {
|
| 423 |
+
"Day": Timestamp("2010-12-31 00:00:00"),
|
| 424 |
+
"DateOffset": Timestamp("2010-12-31 00:00:00"),
|
| 425 |
+
"MonthBegin": Timestamp("2010-12-01 00:00:00"),
|
| 426 |
+
"SemiMonthBegin": Timestamp("2010-12-15 00:00:00"),
|
| 427 |
+
"YearBegin": Timestamp("2010-01-01 00:00:00"),
|
| 428 |
+
"Week": Timestamp("2010-12-25 00:00:00"),
|
| 429 |
+
"Hour": Timestamp("2011-01-01 00:00:00"),
|
| 430 |
+
"Minute": Timestamp("2011-01-01 00:00:00"),
|
| 431 |
+
"Second": Timestamp("2011-01-01 00:00:00"),
|
| 432 |
+
"Milli": Timestamp("2011-01-01 00:00:00"),
|
| 433 |
+
"Micro": Timestamp("2011-01-01 00:00:00"),
|
| 434 |
+
}
|
| 435 |
+
norm_expected.update(normalized)
|
| 436 |
+
|
| 437 |
+
sdt = datetime(2011, 1, 1, 9, 0)
|
| 438 |
+
ndt = np.datetime64("2011-01-01 09:00")
|
| 439 |
+
|
| 440 |
+
for dt in [sdt, ndt]:
|
| 441 |
+
expected = expecteds[offset_types.__name__]
|
| 442 |
+
self._check_offsetfunc_works(offset_types, "rollback", dt, expected)
|
| 443 |
+
|
| 444 |
+
expected = norm_expected[offset_types.__name__]
|
| 445 |
+
self._check_offsetfunc_works(
|
| 446 |
+
offset_types, "rollback", dt, expected, normalize=True
|
| 447 |
+
)
|
| 448 |
+
|
| 449 |
+
def test_is_on_offset(self, offset_types, expecteds):
|
| 450 |
+
dt = expecteds[offset_types.__name__]
|
| 451 |
+
offset_s = _create_offset(offset_types)
|
| 452 |
+
assert offset_s.is_on_offset(dt)
|
| 453 |
+
|
| 454 |
+
# when normalize=True, is_on_offset checks time is 00:00:00
|
| 455 |
+
if issubclass(offset_types, Tick):
|
| 456 |
+
# normalize=True disallowed for Tick subclasses GH#21427
|
| 457 |
+
return
|
| 458 |
+
offset_n = _create_offset(offset_types, normalize=True)
|
| 459 |
+
assert not offset_n.is_on_offset(dt)
|
| 460 |
+
|
| 461 |
+
if offset_types in (BusinessHour, CustomBusinessHour):
|
| 462 |
+
# In default BusinessHour (9:00-17:00), normalized time
|
| 463 |
+
# cannot be in business hour range
|
| 464 |
+
return
|
| 465 |
+
date = datetime(dt.year, dt.month, dt.day)
|
| 466 |
+
assert offset_n.is_on_offset(date)
|
| 467 |
+
|
| 468 |
+
def test_add(self, offset_types, tz_naive_fixture, expecteds):
|
| 469 |
+
tz = tz_naive_fixture
|
| 470 |
+
dt = datetime(2011, 1, 1, 9, 0)
|
| 471 |
+
|
| 472 |
+
offset_s = _create_offset(offset_types)
|
| 473 |
+
expected = expecteds[offset_types.__name__]
|
| 474 |
+
|
| 475 |
+
result_dt = dt + offset_s
|
| 476 |
+
result_ts = Timestamp(dt) + offset_s
|
| 477 |
+
for result in [result_dt, result_ts]:
|
| 478 |
+
assert isinstance(result, Timestamp)
|
| 479 |
+
assert result == expected
|
| 480 |
+
|
| 481 |
+
expected_localize = expected.tz_localize(tz)
|
| 482 |
+
result = Timestamp(dt, tz=tz) + offset_s
|
| 483 |
+
assert isinstance(result, Timestamp)
|
| 484 |
+
assert result == expected_localize
|
| 485 |
+
|
| 486 |
+
# normalize=True, disallowed for Tick subclasses GH#21427
|
| 487 |
+
if issubclass(offset_types, Tick):
|
| 488 |
+
return
|
| 489 |
+
offset_s = _create_offset(offset_types, normalize=True)
|
| 490 |
+
expected = Timestamp(expected.date())
|
| 491 |
+
|
| 492 |
+
result_dt = dt + offset_s
|
| 493 |
+
result_ts = Timestamp(dt) + offset_s
|
| 494 |
+
for result in [result_dt, result_ts]:
|
| 495 |
+
assert isinstance(result, Timestamp)
|
| 496 |
+
assert result == expected
|
| 497 |
+
|
| 498 |
+
expected_localize = expected.tz_localize(tz)
|
| 499 |
+
result = Timestamp(dt, tz=tz) + offset_s
|
| 500 |
+
assert isinstance(result, Timestamp)
|
| 501 |
+
assert result == expected_localize
|
| 502 |
+
|
| 503 |
+
def test_add_empty_datetimeindex(self, offset_types, tz_naive_fixture):
|
| 504 |
+
# GH#12724, GH#30336
|
| 505 |
+
offset_s = _create_offset(offset_types)
|
| 506 |
+
|
| 507 |
+
dti = DatetimeIndex([], tz=tz_naive_fixture).as_unit("ns")
|
| 508 |
+
|
| 509 |
+
warn = None
|
| 510 |
+
if isinstance(
|
| 511 |
+
offset_s,
|
| 512 |
+
(
|
| 513 |
+
Easter,
|
| 514 |
+
WeekOfMonth,
|
| 515 |
+
LastWeekOfMonth,
|
| 516 |
+
CustomBusinessDay,
|
| 517 |
+
BusinessHour,
|
| 518 |
+
CustomBusinessHour,
|
| 519 |
+
CustomBusinessMonthBegin,
|
| 520 |
+
CustomBusinessMonthEnd,
|
| 521 |
+
FY5253,
|
| 522 |
+
FY5253Quarter,
|
| 523 |
+
),
|
| 524 |
+
):
|
| 525 |
+
# We don't have an optimized apply_index
|
| 526 |
+
warn = PerformanceWarning
|
| 527 |
+
|
| 528 |
+
# stacklevel checking is slow, and we have ~800 of variants of this
|
| 529 |
+
# test, so let's only check the stacklevel in a subset of them
|
| 530 |
+
check_stacklevel = tz_naive_fixture is None
|
| 531 |
+
with tm.assert_produces_warning(warn, check_stacklevel=check_stacklevel):
|
| 532 |
+
result = dti + offset_s
|
| 533 |
+
tm.assert_index_equal(result, dti)
|
| 534 |
+
with tm.assert_produces_warning(warn, check_stacklevel=check_stacklevel):
|
| 535 |
+
result = offset_s + dti
|
| 536 |
+
tm.assert_index_equal(result, dti)
|
| 537 |
+
|
| 538 |
+
dta = dti._data
|
| 539 |
+
with tm.assert_produces_warning(warn, check_stacklevel=check_stacklevel):
|
| 540 |
+
result = dta + offset_s
|
| 541 |
+
tm.assert_equal(result, dta)
|
| 542 |
+
with tm.assert_produces_warning(warn, check_stacklevel=check_stacklevel):
|
| 543 |
+
result = offset_s + dta
|
| 544 |
+
tm.assert_equal(result, dta)
|
| 545 |
+
|
| 546 |
+
def test_pickle_roundtrip(self, offset_types):
|
| 547 |
+
off = _create_offset(offset_types)
|
| 548 |
+
res = tm.round_trip_pickle(off)
|
| 549 |
+
assert off == res
|
| 550 |
+
if type(off) is not DateOffset:
|
| 551 |
+
for attr in off._attributes:
|
| 552 |
+
if attr == "calendar":
|
| 553 |
+
# np.busdaycalendar __eq__ will return False;
|
| 554 |
+
# we check holidays and weekmask attrs so are OK
|
| 555 |
+
continue
|
| 556 |
+
# Make sure nothings got lost from _params (which __eq__) is based on
|
| 557 |
+
assert getattr(off, attr) == getattr(res, attr)
|
| 558 |
+
|
| 559 |
+
def test_pickle_dateoffset_odd_inputs(self):
|
| 560 |
+
# GH#34511
|
| 561 |
+
off = DateOffset(months=12)
|
| 562 |
+
res = tm.round_trip_pickle(off)
|
| 563 |
+
assert off == res
|
| 564 |
+
|
| 565 |
+
base_dt = datetime(2020, 1, 1)
|
| 566 |
+
assert base_dt + off == base_dt + res
|
| 567 |
+
|
| 568 |
+
def test_offsets_hashable(self, offset_types):
|
| 569 |
+
# GH: 37267
|
| 570 |
+
off = _create_offset(offset_types)
|
| 571 |
+
assert hash(off) is not None
|
| 572 |
+
|
| 573 |
+
# TODO: belongs in arithmetic tests?
|
| 574 |
+
@pytest.mark.filterwarnings(
|
| 575 |
+
"ignore:Non-vectorized DateOffset being applied to Series or DatetimeIndex"
|
| 576 |
+
)
|
| 577 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us"])
|
| 578 |
+
def test_add_dt64_ndarray_non_nano(self, offset_types, unit):
|
| 579 |
+
# check that the result with non-nano matches nano
|
| 580 |
+
off = _create_offset(offset_types)
|
| 581 |
+
|
| 582 |
+
dti = date_range("2016-01-01", periods=35, freq="D", unit=unit)
|
| 583 |
+
|
| 584 |
+
result = (dti + off)._with_freq(None)
|
| 585 |
+
|
| 586 |
+
exp_unit = unit
|
| 587 |
+
if isinstance(off, Tick) and off._creso > dti._data._creso:
|
| 588 |
+
# cast to higher reso like we would with Timedelta scalar
|
| 589 |
+
exp_unit = Timedelta(off).unit
|
| 590 |
+
# TODO(GH#55564): as_unit will be unnecessary
|
| 591 |
+
expected = DatetimeIndex([x + off for x in dti]).as_unit(exp_unit)
|
| 592 |
+
|
| 593 |
+
tm.assert_index_equal(result, expected)
|
| 594 |
+
|
| 595 |
+
|
| 596 |
+
class TestDateOffset:
|
| 597 |
+
def setup_method(self):
|
| 598 |
+
_offset_map.clear()
|
| 599 |
+
|
| 600 |
+
def test_repr(self):
|
| 601 |
+
repr(DateOffset())
|
| 602 |
+
repr(DateOffset(2))
|
| 603 |
+
repr(2 * DateOffset())
|
| 604 |
+
repr(2 * DateOffset(months=2))
|
| 605 |
+
|
| 606 |
+
def test_mul(self):
|
| 607 |
+
assert DateOffset(2) == 2 * DateOffset(1)
|
| 608 |
+
assert DateOffset(2) == DateOffset(1) * 2
|
| 609 |
+
|
| 610 |
+
@pytest.mark.parametrize("kwd", sorted(liboffsets._relativedelta_kwds))
|
| 611 |
+
def test_constructor(self, kwd, request):
|
| 612 |
+
if kwd == "millisecond":
|
| 613 |
+
request.applymarker(
|
| 614 |
+
pytest.mark.xfail(
|
| 615 |
+
raises=NotImplementedError,
|
| 616 |
+
reason="Constructing DateOffset object with `millisecond` is not "
|
| 617 |
+
"yet supported.",
|
| 618 |
+
)
|
| 619 |
+
)
|
| 620 |
+
offset = DateOffset(**{kwd: 2})
|
| 621 |
+
assert offset.kwds == {kwd: 2}
|
| 622 |
+
assert getattr(offset, kwd) == 2
|
| 623 |
+
|
| 624 |
+
def test_default_constructor(self, dt):
|
| 625 |
+
assert (dt + DateOffset(2)) == datetime(2008, 1, 4)
|
| 626 |
+
|
| 627 |
+
def test_is_anchored(self):
|
| 628 |
+
msg = "DateOffset.is_anchored is deprecated "
|
| 629 |
+
|
| 630 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 631 |
+
assert not DateOffset(2).is_anchored()
|
| 632 |
+
assert DateOffset(1).is_anchored()
|
| 633 |
+
|
| 634 |
+
def test_copy(self):
|
| 635 |
+
assert DateOffset(months=2).copy() == DateOffset(months=2)
|
| 636 |
+
assert DateOffset(milliseconds=1).copy() == DateOffset(milliseconds=1)
|
| 637 |
+
|
| 638 |
+
@pytest.mark.parametrize(
|
| 639 |
+
"arithmatic_offset_type, expected",
|
| 640 |
+
zip(
|
| 641 |
+
_ARITHMETIC_DATE_OFFSET,
|
| 642 |
+
[
|
| 643 |
+
"2009-01-02",
|
| 644 |
+
"2008-02-02",
|
| 645 |
+
"2008-01-09",
|
| 646 |
+
"2008-01-03",
|
| 647 |
+
"2008-01-02 01:00:00",
|
| 648 |
+
"2008-01-02 00:01:00",
|
| 649 |
+
"2008-01-02 00:00:01",
|
| 650 |
+
"2008-01-02 00:00:00.001000000",
|
| 651 |
+
"2008-01-02 00:00:00.000001000",
|
| 652 |
+
],
|
| 653 |
+
),
|
| 654 |
+
)
|
| 655 |
+
def test_add(self, arithmatic_offset_type, expected, dt):
|
| 656 |
+
assert DateOffset(**{arithmatic_offset_type: 1}) + dt == Timestamp(expected)
|
| 657 |
+
assert dt + DateOffset(**{arithmatic_offset_type: 1}) == Timestamp(expected)
|
| 658 |
+
|
| 659 |
+
@pytest.mark.parametrize(
|
| 660 |
+
"arithmatic_offset_type, expected",
|
| 661 |
+
zip(
|
| 662 |
+
_ARITHMETIC_DATE_OFFSET,
|
| 663 |
+
[
|
| 664 |
+
"2007-01-02",
|
| 665 |
+
"2007-12-02",
|
| 666 |
+
"2007-12-26",
|
| 667 |
+
"2008-01-01",
|
| 668 |
+
"2008-01-01 23:00:00",
|
| 669 |
+
"2008-01-01 23:59:00",
|
| 670 |
+
"2008-01-01 23:59:59",
|
| 671 |
+
"2008-01-01 23:59:59.999000000",
|
| 672 |
+
"2008-01-01 23:59:59.999999000",
|
| 673 |
+
],
|
| 674 |
+
),
|
| 675 |
+
)
|
| 676 |
+
def test_sub(self, arithmatic_offset_type, expected, dt):
|
| 677 |
+
assert dt - DateOffset(**{arithmatic_offset_type: 1}) == Timestamp(expected)
|
| 678 |
+
with pytest.raises(TypeError, match="Cannot subtract datetime from offset"):
|
| 679 |
+
DateOffset(**{arithmatic_offset_type: 1}) - dt
|
| 680 |
+
|
| 681 |
+
@pytest.mark.parametrize(
|
| 682 |
+
"arithmatic_offset_type, n, expected",
|
| 683 |
+
zip(
|
| 684 |
+
_ARITHMETIC_DATE_OFFSET,
|
| 685 |
+
range(1, 10),
|
| 686 |
+
[
|
| 687 |
+
"2009-01-02",
|
| 688 |
+
"2008-03-02",
|
| 689 |
+
"2008-01-23",
|
| 690 |
+
"2008-01-06",
|
| 691 |
+
"2008-01-02 05:00:00",
|
| 692 |
+
"2008-01-02 00:06:00",
|
| 693 |
+
"2008-01-02 00:00:07",
|
| 694 |
+
"2008-01-02 00:00:00.008000000",
|
| 695 |
+
"2008-01-02 00:00:00.000009000",
|
| 696 |
+
],
|
| 697 |
+
),
|
| 698 |
+
)
|
| 699 |
+
def test_mul_add(self, arithmatic_offset_type, n, expected, dt):
|
| 700 |
+
assert DateOffset(**{arithmatic_offset_type: 1}) * n + dt == Timestamp(expected)
|
| 701 |
+
assert n * DateOffset(**{arithmatic_offset_type: 1}) + dt == Timestamp(expected)
|
| 702 |
+
assert dt + DateOffset(**{arithmatic_offset_type: 1}) * n == Timestamp(expected)
|
| 703 |
+
assert dt + n * DateOffset(**{arithmatic_offset_type: 1}) == Timestamp(expected)
|
| 704 |
+
|
| 705 |
+
@pytest.mark.parametrize(
|
| 706 |
+
"arithmatic_offset_type, n, expected",
|
| 707 |
+
zip(
|
| 708 |
+
_ARITHMETIC_DATE_OFFSET,
|
| 709 |
+
range(1, 10),
|
| 710 |
+
[
|
| 711 |
+
"2007-01-02",
|
| 712 |
+
"2007-11-02",
|
| 713 |
+
"2007-12-12",
|
| 714 |
+
"2007-12-29",
|
| 715 |
+
"2008-01-01 19:00:00",
|
| 716 |
+
"2008-01-01 23:54:00",
|
| 717 |
+
"2008-01-01 23:59:53",
|
| 718 |
+
"2008-01-01 23:59:59.992000000",
|
| 719 |
+
"2008-01-01 23:59:59.999991000",
|
| 720 |
+
],
|
| 721 |
+
),
|
| 722 |
+
)
|
| 723 |
+
def test_mul_sub(self, arithmatic_offset_type, n, expected, dt):
|
| 724 |
+
assert dt - DateOffset(**{arithmatic_offset_type: 1}) * n == Timestamp(expected)
|
| 725 |
+
assert dt - n * DateOffset(**{arithmatic_offset_type: 1}) == Timestamp(expected)
|
| 726 |
+
|
| 727 |
+
def test_leap_year(self):
|
| 728 |
+
d = datetime(2008, 1, 31)
|
| 729 |
+
assert (d + DateOffset(months=1)) == datetime(2008, 2, 29)
|
| 730 |
+
|
| 731 |
+
def test_eq(self):
|
| 732 |
+
offset1 = DateOffset(days=1)
|
| 733 |
+
offset2 = DateOffset(days=365)
|
| 734 |
+
|
| 735 |
+
assert offset1 != offset2
|
| 736 |
+
|
| 737 |
+
assert DateOffset(milliseconds=3) != DateOffset(milliseconds=7)
|
| 738 |
+
|
| 739 |
+
@pytest.mark.parametrize(
|
| 740 |
+
"offset_kwargs, expected_arg",
|
| 741 |
+
[
|
| 742 |
+
({"microseconds": 1, "milliseconds": 1}, "2022-01-01 00:00:00.001001"),
|
| 743 |
+
({"seconds": 1, "milliseconds": 1}, "2022-01-01 00:00:01.001"),
|
| 744 |
+
({"minutes": 1, "milliseconds": 1}, "2022-01-01 00:01:00.001"),
|
| 745 |
+
({"hours": 1, "milliseconds": 1}, "2022-01-01 01:00:00.001"),
|
| 746 |
+
({"days": 1, "milliseconds": 1}, "2022-01-02 00:00:00.001"),
|
| 747 |
+
({"weeks": 1, "milliseconds": 1}, "2022-01-08 00:00:00.001"),
|
| 748 |
+
({"months": 1, "milliseconds": 1}, "2022-02-01 00:00:00.001"),
|
| 749 |
+
({"years": 1, "milliseconds": 1}, "2023-01-01 00:00:00.001"),
|
| 750 |
+
],
|
| 751 |
+
)
|
| 752 |
+
def test_milliseconds_combination(self, offset_kwargs, expected_arg):
|
| 753 |
+
# GH 49897
|
| 754 |
+
offset = DateOffset(**offset_kwargs)
|
| 755 |
+
ts = Timestamp("2022-01-01")
|
| 756 |
+
result = ts + offset
|
| 757 |
+
expected = Timestamp(expected_arg)
|
| 758 |
+
|
| 759 |
+
assert result == expected
|
| 760 |
+
|
| 761 |
+
def test_offset_invalid_arguments(self):
|
| 762 |
+
msg = "^Invalid argument/s or bad combination of arguments"
|
| 763 |
+
with pytest.raises(ValueError, match=msg):
|
| 764 |
+
DateOffset(picoseconds=1)
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
class TestOffsetNames:
|
| 768 |
+
def test_get_offset_name(self):
|
| 769 |
+
assert BDay().freqstr == "B"
|
| 770 |
+
assert BDay(2).freqstr == "2B"
|
| 771 |
+
assert BMonthEnd().freqstr == "BME"
|
| 772 |
+
assert Week(weekday=0).freqstr == "W-MON"
|
| 773 |
+
assert Week(weekday=1).freqstr == "W-TUE"
|
| 774 |
+
assert Week(weekday=2).freqstr == "W-WED"
|
| 775 |
+
assert Week(weekday=3).freqstr == "W-THU"
|
| 776 |
+
assert Week(weekday=4).freqstr == "W-FRI"
|
| 777 |
+
|
| 778 |
+
assert LastWeekOfMonth(weekday=WeekDay.SUN).freqstr == "LWOM-SUN"
|
| 779 |
+
|
| 780 |
+
|
| 781 |
+
def test_get_offset():
|
| 782 |
+
with pytest.raises(ValueError, match=INVALID_FREQ_ERR_MSG):
|
| 783 |
+
_get_offset("gibberish")
|
| 784 |
+
with pytest.raises(ValueError, match=INVALID_FREQ_ERR_MSG):
|
| 785 |
+
_get_offset("QS-JAN-B")
|
| 786 |
+
|
| 787 |
+
pairs = [
|
| 788 |
+
("B", BDay()),
|
| 789 |
+
("b", BDay()),
|
| 790 |
+
("bme", BMonthEnd()),
|
| 791 |
+
("Bme", BMonthEnd()),
|
| 792 |
+
("W-MON", Week(weekday=0)),
|
| 793 |
+
("W-TUE", Week(weekday=1)),
|
| 794 |
+
("W-WED", Week(weekday=2)),
|
| 795 |
+
("W-THU", Week(weekday=3)),
|
| 796 |
+
("W-FRI", Week(weekday=4)),
|
| 797 |
+
]
|
| 798 |
+
|
| 799 |
+
for name, expected in pairs:
|
| 800 |
+
offset = _get_offset(name)
|
| 801 |
+
assert offset == expected, (
|
| 802 |
+
f"Expected {repr(name)} to yield {repr(expected)} "
|
| 803 |
+
f"(actual: {repr(offset)})"
|
| 804 |
+
)
|
| 805 |
+
|
| 806 |
+
|
| 807 |
+
def test_get_offset_legacy():
|
| 808 |
+
pairs = [("w@Sat", Week(weekday=5))]
|
| 809 |
+
for name, expected in pairs:
|
| 810 |
+
with pytest.raises(ValueError, match=INVALID_FREQ_ERR_MSG):
|
| 811 |
+
_get_offset(name)
|
| 812 |
+
|
| 813 |
+
|
| 814 |
+
class TestOffsetAliases:
|
| 815 |
+
def setup_method(self):
|
| 816 |
+
_offset_map.clear()
|
| 817 |
+
|
| 818 |
+
def test_alias_equality(self):
|
| 819 |
+
for k, v in _offset_map.items():
|
| 820 |
+
if v is None:
|
| 821 |
+
continue
|
| 822 |
+
assert k == v.copy()
|
| 823 |
+
|
| 824 |
+
def test_rule_code(self):
|
| 825 |
+
lst = ["ME", "MS", "BME", "BMS", "D", "B", "h", "min", "s", "ms", "us"]
|
| 826 |
+
for k in lst:
|
| 827 |
+
assert k == _get_offset(k).rule_code
|
| 828 |
+
# should be cached - this is kind of an internals test...
|
| 829 |
+
assert k in _offset_map
|
| 830 |
+
assert k == (_get_offset(k) * 3).rule_code
|
| 831 |
+
|
| 832 |
+
suffix_lst = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"]
|
| 833 |
+
base = "W"
|
| 834 |
+
for v in suffix_lst:
|
| 835 |
+
alias = "-".join([base, v])
|
| 836 |
+
assert alias == _get_offset(alias).rule_code
|
| 837 |
+
assert alias == (_get_offset(alias) * 5).rule_code
|
| 838 |
+
|
| 839 |
+
suffix_lst = [
|
| 840 |
+
"JAN",
|
| 841 |
+
"FEB",
|
| 842 |
+
"MAR",
|
| 843 |
+
"APR",
|
| 844 |
+
"MAY",
|
| 845 |
+
"JUN",
|
| 846 |
+
"JUL",
|
| 847 |
+
"AUG",
|
| 848 |
+
"SEP",
|
| 849 |
+
"OCT",
|
| 850 |
+
"NOV",
|
| 851 |
+
"DEC",
|
| 852 |
+
]
|
| 853 |
+
base_lst = ["YE", "YS", "BYE", "BYS", "QE", "QS", "BQE", "BQS"]
|
| 854 |
+
for base in base_lst:
|
| 855 |
+
for v in suffix_lst:
|
| 856 |
+
alias = "-".join([base, v])
|
| 857 |
+
assert alias == _get_offset(alias).rule_code
|
| 858 |
+
assert alias == (_get_offset(alias) * 5).rule_code
|
| 859 |
+
|
| 860 |
+
|
| 861 |
+
def test_freq_offsets():
|
| 862 |
+
off = BDay(1, offset=timedelta(0, 1800))
|
| 863 |
+
assert off.freqstr == "B+30Min"
|
| 864 |
+
|
| 865 |
+
off = BDay(1, offset=timedelta(0, -1800))
|
| 866 |
+
assert off.freqstr == "B-30Min"
|
| 867 |
+
|
| 868 |
+
|
| 869 |
+
class TestReprNames:
|
| 870 |
+
def test_str_for_named_is_name(self):
|
| 871 |
+
# look at all the amazing combinations!
|
| 872 |
+
month_prefixes = ["YE", "YS", "BYE", "BYS", "QE", "BQE", "BQS", "QS"]
|
| 873 |
+
names = [
|
| 874 |
+
prefix + "-" + month
|
| 875 |
+
for prefix in month_prefixes
|
| 876 |
+
for month in [
|
| 877 |
+
"JAN",
|
| 878 |
+
"FEB",
|
| 879 |
+
"MAR",
|
| 880 |
+
"APR",
|
| 881 |
+
"MAY",
|
| 882 |
+
"JUN",
|
| 883 |
+
"JUL",
|
| 884 |
+
"AUG",
|
| 885 |
+
"SEP",
|
| 886 |
+
"OCT",
|
| 887 |
+
"NOV",
|
| 888 |
+
"DEC",
|
| 889 |
+
]
|
| 890 |
+
]
|
| 891 |
+
days = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"]
|
| 892 |
+
names += ["W-" + day for day in days]
|
| 893 |
+
names += ["WOM-" + week + day for week in ("1", "2", "3", "4") for day in days]
|
| 894 |
+
_offset_map.clear()
|
| 895 |
+
for name in names:
|
| 896 |
+
offset = _get_offset(name)
|
| 897 |
+
assert offset.freqstr == name
|
| 898 |
+
|
| 899 |
+
|
| 900 |
+
# ---------------------------------------------------------------------
|
| 901 |
+
|
| 902 |
+
|
| 903 |
+
def test_valid_default_arguments(offset_types):
|
| 904 |
+
# GH#19142 check that the calling the constructors without passing
|
| 905 |
+
# any keyword arguments produce valid offsets
|
| 906 |
+
cls = offset_types
|
| 907 |
+
cls()
|
| 908 |
+
|
| 909 |
+
|
| 910 |
+
@pytest.mark.parametrize("kwd", sorted(liboffsets._relativedelta_kwds))
|
| 911 |
+
def test_valid_month_attributes(kwd, month_classes):
|
| 912 |
+
# GH#18226
|
| 913 |
+
cls = month_classes
|
| 914 |
+
# check that we cannot create e.g. MonthEnd(weeks=3)
|
| 915 |
+
msg = rf"__init__\(\) got an unexpected keyword argument '{kwd}'"
|
| 916 |
+
with pytest.raises(TypeError, match=msg):
|
| 917 |
+
cls(**{kwd: 3})
|
| 918 |
+
|
| 919 |
+
|
| 920 |
+
def test_month_offset_name(month_classes):
|
| 921 |
+
# GH#33757 off.name with n != 1 should not raise AttributeError
|
| 922 |
+
obj = month_classes(1)
|
| 923 |
+
obj2 = month_classes(2)
|
| 924 |
+
assert obj2.name == obj.name
|
| 925 |
+
|
| 926 |
+
|
| 927 |
+
@pytest.mark.parametrize("kwd", sorted(liboffsets._relativedelta_kwds))
|
| 928 |
+
def test_valid_relativedelta_kwargs(kwd, request):
|
| 929 |
+
if kwd == "millisecond":
|
| 930 |
+
request.applymarker(
|
| 931 |
+
pytest.mark.xfail(
|
| 932 |
+
raises=NotImplementedError,
|
| 933 |
+
reason="Constructing DateOffset object with `millisecond` is not "
|
| 934 |
+
"yet supported.",
|
| 935 |
+
)
|
| 936 |
+
)
|
| 937 |
+
# Check that all the arguments specified in liboffsets._relativedelta_kwds
|
| 938 |
+
# are in fact valid relativedelta keyword args
|
| 939 |
+
DateOffset(**{kwd: 1})
|
| 940 |
+
|
| 941 |
+
|
| 942 |
+
@pytest.mark.parametrize("kwd", sorted(liboffsets._relativedelta_kwds))
|
| 943 |
+
def test_valid_tick_attributes(kwd, tick_classes):
|
| 944 |
+
# GH#18226
|
| 945 |
+
cls = tick_classes
|
| 946 |
+
# check that we cannot create e.g. Hour(weeks=3)
|
| 947 |
+
msg = rf"__init__\(\) got an unexpected keyword argument '{kwd}'"
|
| 948 |
+
with pytest.raises(TypeError, match=msg):
|
| 949 |
+
cls(**{kwd: 3})
|
| 950 |
+
|
| 951 |
+
|
| 952 |
+
def test_validate_n_error():
|
| 953 |
+
with pytest.raises(TypeError, match="argument must be an integer"):
|
| 954 |
+
DateOffset(n="Doh!")
|
| 955 |
+
|
| 956 |
+
with pytest.raises(TypeError, match="argument must be an integer"):
|
| 957 |
+
MonthBegin(n=timedelta(1))
|
| 958 |
+
|
| 959 |
+
with pytest.raises(TypeError, match="argument must be an integer"):
|
| 960 |
+
BDay(n=np.array([1, 2], dtype=np.int64))
|
| 961 |
+
|
| 962 |
+
|
| 963 |
+
def test_require_integers(offset_types):
|
| 964 |
+
cls = offset_types
|
| 965 |
+
with pytest.raises(ValueError, match="argument must be an integer"):
|
| 966 |
+
cls(n=1.5)
|
| 967 |
+
|
| 968 |
+
|
| 969 |
+
def test_tick_normalize_raises(tick_classes):
|
| 970 |
+
# check that trying to create a Tick object with normalize=True raises
|
| 971 |
+
# GH#21427
|
| 972 |
+
cls = tick_classes
|
| 973 |
+
msg = "Tick offset with `normalize=True` are not allowed."
|
| 974 |
+
with pytest.raises(ValueError, match=msg):
|
| 975 |
+
cls(n=3, normalize=True)
|
| 976 |
+
|
| 977 |
+
|
| 978 |
+
@pytest.mark.parametrize(
|
| 979 |
+
"offset_kwargs, expected_arg",
|
| 980 |
+
[
|
| 981 |
+
({"nanoseconds": 1}, "1970-01-01 00:00:00.000000001"),
|
| 982 |
+
({"nanoseconds": 5}, "1970-01-01 00:00:00.000000005"),
|
| 983 |
+
({"nanoseconds": -1}, "1969-12-31 23:59:59.999999999"),
|
| 984 |
+
({"microseconds": 1}, "1970-01-01 00:00:00.000001"),
|
| 985 |
+
({"microseconds": -1}, "1969-12-31 23:59:59.999999"),
|
| 986 |
+
({"seconds": 1}, "1970-01-01 00:00:01"),
|
| 987 |
+
({"seconds": -1}, "1969-12-31 23:59:59"),
|
| 988 |
+
({"minutes": 1}, "1970-01-01 00:01:00"),
|
| 989 |
+
({"minutes": -1}, "1969-12-31 23:59:00"),
|
| 990 |
+
({"hours": 1}, "1970-01-01 01:00:00"),
|
| 991 |
+
({"hours": -1}, "1969-12-31 23:00:00"),
|
| 992 |
+
({"days": 1}, "1970-01-02 00:00:00"),
|
| 993 |
+
({"days": -1}, "1969-12-31 00:00:00"),
|
| 994 |
+
({"weeks": 1}, "1970-01-08 00:00:00"),
|
| 995 |
+
({"weeks": -1}, "1969-12-25 00:00:00"),
|
| 996 |
+
({"months": 1}, "1970-02-01 00:00:00"),
|
| 997 |
+
({"months": -1}, "1969-12-01 00:00:00"),
|
| 998 |
+
({"years": 1}, "1971-01-01 00:00:00"),
|
| 999 |
+
({"years": -1}, "1969-01-01 00:00:00"),
|
| 1000 |
+
],
|
| 1001 |
+
)
|
| 1002 |
+
def test_dateoffset_add_sub(offset_kwargs, expected_arg):
|
| 1003 |
+
offset = DateOffset(**offset_kwargs)
|
| 1004 |
+
ts = Timestamp(0)
|
| 1005 |
+
result = ts + offset
|
| 1006 |
+
expected = Timestamp(expected_arg)
|
| 1007 |
+
assert result == expected
|
| 1008 |
+
result -= offset
|
| 1009 |
+
assert result == ts
|
| 1010 |
+
result = offset + ts
|
| 1011 |
+
assert result == expected
|
| 1012 |
+
|
| 1013 |
+
|
| 1014 |
+
def test_dateoffset_add_sub_timestamp_with_nano():
|
| 1015 |
+
offset = DateOffset(minutes=2, nanoseconds=9)
|
| 1016 |
+
ts = Timestamp(4)
|
| 1017 |
+
result = ts + offset
|
| 1018 |
+
expected = Timestamp("1970-01-01 00:02:00.000000013")
|
| 1019 |
+
assert result == expected
|
| 1020 |
+
result -= offset
|
| 1021 |
+
assert result == ts
|
| 1022 |
+
result = offset + ts
|
| 1023 |
+
assert result == expected
|
| 1024 |
+
|
| 1025 |
+
offset2 = DateOffset(minutes=2, nanoseconds=9, hour=1)
|
| 1026 |
+
assert offset2._use_relativedelta
|
| 1027 |
+
with tm.assert_produces_warning(None):
|
| 1028 |
+
# no warning about Discarding nonzero nanoseconds
|
| 1029 |
+
result2 = ts + offset2
|
| 1030 |
+
expected2 = Timestamp("1970-01-01 01:02:00.000000013")
|
| 1031 |
+
assert result2 == expected2
|
| 1032 |
+
|
| 1033 |
+
|
| 1034 |
+
@pytest.mark.parametrize(
|
| 1035 |
+
"attribute",
|
| 1036 |
+
[
|
| 1037 |
+
"hours",
|
| 1038 |
+
"days",
|
| 1039 |
+
"weeks",
|
| 1040 |
+
"months",
|
| 1041 |
+
"years",
|
| 1042 |
+
],
|
| 1043 |
+
)
|
| 1044 |
+
def test_dateoffset_immutable(attribute):
|
| 1045 |
+
offset = DateOffset(**{attribute: 0})
|
| 1046 |
+
msg = "DateOffset objects are immutable"
|
| 1047 |
+
with pytest.raises(AttributeError, match=msg):
|
| 1048 |
+
setattr(offset, attribute, 5)
|
| 1049 |
+
|
| 1050 |
+
|
| 1051 |
+
def test_dateoffset_misc():
|
| 1052 |
+
oset = offsets.DateOffset(months=2, days=4)
|
| 1053 |
+
# it works
|
| 1054 |
+
oset.freqstr
|
| 1055 |
+
|
| 1056 |
+
assert not offsets.DateOffset(months=2) == 2
|
| 1057 |
+
|
| 1058 |
+
|
| 1059 |
+
@pytest.mark.parametrize("n", [-1, 1, 3])
|
| 1060 |
+
def test_construct_int_arg_no_kwargs_assumed_days(n):
|
| 1061 |
+
# GH 45890, 45643
|
| 1062 |
+
offset = DateOffset(n)
|
| 1063 |
+
assert offset._offset == timedelta(1)
|
| 1064 |
+
result = Timestamp(2022, 1, 2) + offset
|
| 1065 |
+
expected = Timestamp(2022, 1, 2 + n)
|
| 1066 |
+
assert result == expected
|
| 1067 |
+
|
| 1068 |
+
|
| 1069 |
+
@pytest.mark.parametrize(
|
| 1070 |
+
"offset, expected",
|
| 1071 |
+
[
|
| 1072 |
+
(
|
| 1073 |
+
DateOffset(minutes=7, nanoseconds=18),
|
| 1074 |
+
Timestamp("2022-01-01 00:07:00.000000018"),
|
| 1075 |
+
),
|
| 1076 |
+
(DateOffset(nanoseconds=3), Timestamp("2022-01-01 00:00:00.000000003")),
|
| 1077 |
+
],
|
| 1078 |
+
)
|
| 1079 |
+
def test_dateoffset_add_sub_timestamp_series_with_nano(offset, expected):
|
| 1080 |
+
# GH 47856
|
| 1081 |
+
start_time = Timestamp("2022-01-01")
|
| 1082 |
+
teststamp = start_time
|
| 1083 |
+
testseries = Series([start_time])
|
| 1084 |
+
testseries = testseries + offset
|
| 1085 |
+
assert testseries[0] == expected
|
| 1086 |
+
testseries -= offset
|
| 1087 |
+
assert testseries[0] == teststamp
|
| 1088 |
+
testseries = offset + testseries
|
| 1089 |
+
assert testseries[0] == expected
|
| 1090 |
+
|
| 1091 |
+
|
| 1092 |
+
@pytest.mark.parametrize(
|
| 1093 |
+
"n_months, scaling_factor, start_timestamp, expected_timestamp",
|
| 1094 |
+
[
|
| 1095 |
+
(1, 2, "2020-01-30", "2020-03-30"),
|
| 1096 |
+
(2, 1, "2020-01-30", "2020-03-30"),
|
| 1097 |
+
(1, 0, "2020-01-30", "2020-01-30"),
|
| 1098 |
+
(2, 0, "2020-01-30", "2020-01-30"),
|
| 1099 |
+
(1, -1, "2020-01-30", "2019-12-30"),
|
| 1100 |
+
(2, -1, "2020-01-30", "2019-11-30"),
|
| 1101 |
+
],
|
| 1102 |
+
)
|
| 1103 |
+
def test_offset_multiplication(
|
| 1104 |
+
n_months, scaling_factor, start_timestamp, expected_timestamp
|
| 1105 |
+
):
|
| 1106 |
+
# GH 47953
|
| 1107 |
+
mo1 = DateOffset(months=n_months)
|
| 1108 |
+
|
| 1109 |
+
startscalar = Timestamp(start_timestamp)
|
| 1110 |
+
startarray = Series([startscalar])
|
| 1111 |
+
|
| 1112 |
+
resultscalar = startscalar + (mo1 * scaling_factor)
|
| 1113 |
+
resultarray = startarray + (mo1 * scaling_factor)
|
| 1114 |
+
|
| 1115 |
+
expectedscalar = Timestamp(expected_timestamp)
|
| 1116 |
+
expectedarray = Series([expectedscalar])
|
| 1117 |
+
assert resultscalar == expectedscalar
|
| 1118 |
+
|
| 1119 |
+
tm.assert_series_equal(resultarray, expectedarray)
|
| 1120 |
+
|
| 1121 |
+
|
| 1122 |
+
def test_dateoffset_operations_on_dataframes():
|
| 1123 |
+
# GH 47953
|
| 1124 |
+
df = DataFrame({"T": [Timestamp("2019-04-30")], "D": [DateOffset(months=1)]})
|
| 1125 |
+
frameresult1 = df["T"] + 26 * df["D"]
|
| 1126 |
+
df2 = DataFrame(
|
| 1127 |
+
{
|
| 1128 |
+
"T": [Timestamp("2019-04-30"), Timestamp("2019-04-30")],
|
| 1129 |
+
"D": [DateOffset(months=1), DateOffset(months=1)],
|
| 1130 |
+
}
|
| 1131 |
+
)
|
| 1132 |
+
expecteddate = Timestamp("2021-06-30")
|
| 1133 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 1134 |
+
frameresult2 = df2["T"] + 26 * df2["D"]
|
| 1135 |
+
|
| 1136 |
+
assert frameresult1[0] == expecteddate
|
| 1137 |
+
assert frameresult2[0] == expecteddate
|
| 1138 |
+
|
| 1139 |
+
|
| 1140 |
+
def test_is_yqm_start_end():
|
| 1141 |
+
freq_m = to_offset("ME")
|
| 1142 |
+
bm = to_offset("BME")
|
| 1143 |
+
qfeb = to_offset("QE-FEB")
|
| 1144 |
+
qsfeb = to_offset("QS-FEB")
|
| 1145 |
+
bq = to_offset("BQE")
|
| 1146 |
+
bqs_apr = to_offset("BQS-APR")
|
| 1147 |
+
as_nov = to_offset("YS-NOV")
|
| 1148 |
+
|
| 1149 |
+
tests = [
|
| 1150 |
+
(freq_m.is_month_start(Timestamp("2013-06-01")), 1),
|
| 1151 |
+
(bm.is_month_start(Timestamp("2013-06-01")), 0),
|
| 1152 |
+
(freq_m.is_month_start(Timestamp("2013-06-03")), 0),
|
| 1153 |
+
(bm.is_month_start(Timestamp("2013-06-03")), 1),
|
| 1154 |
+
(qfeb.is_month_end(Timestamp("2013-02-28")), 1),
|
| 1155 |
+
(qfeb.is_quarter_end(Timestamp("2013-02-28")), 1),
|
| 1156 |
+
(qfeb.is_year_end(Timestamp("2013-02-28")), 1),
|
| 1157 |
+
(qfeb.is_month_start(Timestamp("2013-03-01")), 1),
|
| 1158 |
+
(qfeb.is_quarter_start(Timestamp("2013-03-01")), 1),
|
| 1159 |
+
(qfeb.is_year_start(Timestamp("2013-03-01")), 1),
|
| 1160 |
+
(qsfeb.is_month_end(Timestamp("2013-03-31")), 1),
|
| 1161 |
+
(qsfeb.is_quarter_end(Timestamp("2013-03-31")), 0),
|
| 1162 |
+
(qsfeb.is_year_end(Timestamp("2013-03-31")), 0),
|
| 1163 |
+
(qsfeb.is_month_start(Timestamp("2013-02-01")), 1),
|
| 1164 |
+
(qsfeb.is_quarter_start(Timestamp("2013-02-01")), 1),
|
| 1165 |
+
(qsfeb.is_year_start(Timestamp("2013-02-01")), 1),
|
| 1166 |
+
(bq.is_month_end(Timestamp("2013-06-30")), 0),
|
| 1167 |
+
(bq.is_quarter_end(Timestamp("2013-06-30")), 0),
|
| 1168 |
+
(bq.is_year_end(Timestamp("2013-06-30")), 0),
|
| 1169 |
+
(bq.is_month_end(Timestamp("2013-06-28")), 1),
|
| 1170 |
+
(bq.is_quarter_end(Timestamp("2013-06-28")), 1),
|
| 1171 |
+
(bq.is_year_end(Timestamp("2013-06-28")), 0),
|
| 1172 |
+
(bqs_apr.is_month_end(Timestamp("2013-06-30")), 0),
|
| 1173 |
+
(bqs_apr.is_quarter_end(Timestamp("2013-06-30")), 0),
|
| 1174 |
+
(bqs_apr.is_year_end(Timestamp("2013-06-30")), 0),
|
| 1175 |
+
(bqs_apr.is_month_end(Timestamp("2013-06-28")), 1),
|
| 1176 |
+
(bqs_apr.is_quarter_end(Timestamp("2013-06-28")), 1),
|
| 1177 |
+
(bqs_apr.is_year_end(Timestamp("2013-03-29")), 1),
|
| 1178 |
+
(as_nov.is_year_start(Timestamp("2013-11-01")), 1),
|
| 1179 |
+
(as_nov.is_year_end(Timestamp("2013-10-31")), 1),
|
| 1180 |
+
(Timestamp("2012-02-01").days_in_month, 29),
|
| 1181 |
+
(Timestamp("2013-02-01").days_in_month, 28),
|
| 1182 |
+
]
|
| 1183 |
+
|
| 1184 |
+
for ts, value in tests:
|
| 1185 |
+
assert ts == value
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_offsets_properties.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Behavioral based tests for offsets and date_range.
|
| 3 |
+
|
| 4 |
+
This file is adapted from https://github.com/pandas-dev/pandas/pull/18761 -
|
| 5 |
+
which was more ambitious but less idiomatic in its use of Hypothesis.
|
| 6 |
+
|
| 7 |
+
You may wish to consult the previous version for inspiration on further
|
| 8 |
+
tests, or when trying to pin down the bugs exposed by the tests below.
|
| 9 |
+
"""
|
| 10 |
+
from hypothesis import (
|
| 11 |
+
assume,
|
| 12 |
+
given,
|
| 13 |
+
)
|
| 14 |
+
import pytest
|
| 15 |
+
import pytz
|
| 16 |
+
|
| 17 |
+
import pandas as pd
|
| 18 |
+
from pandas._testing._hypothesis import (
|
| 19 |
+
DATETIME_JAN_1_1900_OPTIONAL_TZ,
|
| 20 |
+
YQM_OFFSET,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
# ----------------------------------------------------------------
|
| 24 |
+
# Offset-specific behaviour tests
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@pytest.mark.arm_slow
|
| 28 |
+
@given(DATETIME_JAN_1_1900_OPTIONAL_TZ, YQM_OFFSET)
|
| 29 |
+
def test_on_offset_implementations(dt, offset):
|
| 30 |
+
assume(not offset.normalize)
|
| 31 |
+
# check that the class-specific implementations of is_on_offset match
|
| 32 |
+
# the general case definition:
|
| 33 |
+
# (dt + offset) - offset == dt
|
| 34 |
+
try:
|
| 35 |
+
compare = (dt + offset) - offset
|
| 36 |
+
except (pytz.NonExistentTimeError, pytz.AmbiguousTimeError):
|
| 37 |
+
# When dt + offset does not exist or is DST-ambiguous, assume(False) to
|
| 38 |
+
# indicate to hypothesis that this is not a valid test case
|
| 39 |
+
# DST-ambiguous example (GH41906):
|
| 40 |
+
# dt = datetime.datetime(1900, 1, 1, tzinfo=pytz.timezone('Africa/Kinshasa'))
|
| 41 |
+
# offset = MonthBegin(66)
|
| 42 |
+
assume(False)
|
| 43 |
+
|
| 44 |
+
assert offset.is_on_offset(dt) == (compare == dt)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@given(YQM_OFFSET)
|
| 48 |
+
def test_shift_across_dst(offset):
|
| 49 |
+
# GH#18319 check that 1) timezone is correctly normalized and
|
| 50 |
+
# 2) that hour is not incorrectly changed by this normalization
|
| 51 |
+
assume(not offset.normalize)
|
| 52 |
+
|
| 53 |
+
# Note that dti includes a transition across DST boundary
|
| 54 |
+
dti = pd.date_range(
|
| 55 |
+
start="2017-10-30 12:00:00", end="2017-11-06", freq="D", tz="US/Eastern"
|
| 56 |
+
)
|
| 57 |
+
assert (dti.hour == 12).all() # we haven't screwed up yet
|
| 58 |
+
|
| 59 |
+
res = dti + offset
|
| 60 |
+
assert (res.hour == 12).all()
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_quarter.py
ADDED
|
@@ -0,0 +1,303 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the following offsets:
|
| 3 |
+
- QuarterBegin
|
| 4 |
+
- QuarterEnd
|
| 5 |
+
"""
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
import pandas._testing as tm
|
| 13 |
+
from pandas.tests.tseries.offsets.common import (
|
| 14 |
+
assert_is_on_offset,
|
| 15 |
+
assert_offset_equal,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
from pandas.tseries.offsets import (
|
| 19 |
+
QuarterBegin,
|
| 20 |
+
QuarterEnd,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@pytest.mark.parametrize("klass", (QuarterBegin, QuarterEnd))
|
| 25 |
+
def test_quarterly_dont_normalize(klass):
|
| 26 |
+
date = datetime(2012, 3, 31, 5, 30)
|
| 27 |
+
result = date + klass()
|
| 28 |
+
assert result.time() == date.time()
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@pytest.mark.parametrize("offset", [QuarterBegin(), QuarterEnd()])
|
| 32 |
+
@pytest.mark.parametrize(
|
| 33 |
+
"date",
|
| 34 |
+
[
|
| 35 |
+
datetime(2016, m, d)
|
| 36 |
+
for m in [10, 11, 12]
|
| 37 |
+
for d in [1, 2, 3, 28, 29, 30, 31]
|
| 38 |
+
if not (m == 11 and d == 31)
|
| 39 |
+
],
|
| 40 |
+
)
|
| 41 |
+
def test_on_offset(offset, date):
|
| 42 |
+
res = offset.is_on_offset(date)
|
| 43 |
+
slow_version = date == (date + offset) - offset
|
| 44 |
+
assert res == slow_version
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class TestQuarterBegin:
|
| 48 |
+
def test_repr(self):
|
| 49 |
+
expected = "<QuarterBegin: startingMonth=3>"
|
| 50 |
+
assert repr(QuarterBegin()) == expected
|
| 51 |
+
expected = "<QuarterBegin: startingMonth=3>"
|
| 52 |
+
assert repr(QuarterBegin(startingMonth=3)) == expected
|
| 53 |
+
expected = "<QuarterBegin: startingMonth=1>"
|
| 54 |
+
assert repr(QuarterBegin(startingMonth=1)) == expected
|
| 55 |
+
|
| 56 |
+
def test_is_anchored(self):
|
| 57 |
+
msg = "QuarterBegin.is_anchored is deprecated "
|
| 58 |
+
|
| 59 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 60 |
+
assert QuarterBegin(startingMonth=1).is_anchored()
|
| 61 |
+
assert QuarterBegin().is_anchored()
|
| 62 |
+
assert not QuarterBegin(2, startingMonth=1).is_anchored()
|
| 63 |
+
|
| 64 |
+
def test_offset_corner_case(self):
|
| 65 |
+
# corner
|
| 66 |
+
offset = QuarterBegin(n=-1, startingMonth=1)
|
| 67 |
+
assert datetime(2010, 2, 1) + offset == datetime(2010, 1, 1)
|
| 68 |
+
|
| 69 |
+
offset_cases = []
|
| 70 |
+
offset_cases.append(
|
| 71 |
+
(
|
| 72 |
+
QuarterBegin(startingMonth=1),
|
| 73 |
+
{
|
| 74 |
+
datetime(2007, 12, 1): datetime(2008, 1, 1),
|
| 75 |
+
datetime(2008, 1, 1): datetime(2008, 4, 1),
|
| 76 |
+
datetime(2008, 2, 15): datetime(2008, 4, 1),
|
| 77 |
+
datetime(2008, 2, 29): datetime(2008, 4, 1),
|
| 78 |
+
datetime(2008, 3, 15): datetime(2008, 4, 1),
|
| 79 |
+
datetime(2008, 3, 31): datetime(2008, 4, 1),
|
| 80 |
+
datetime(2008, 4, 15): datetime(2008, 7, 1),
|
| 81 |
+
datetime(2008, 4, 1): datetime(2008, 7, 1),
|
| 82 |
+
},
|
| 83 |
+
)
|
| 84 |
+
)
|
| 85 |
+
|
| 86 |
+
offset_cases.append(
|
| 87 |
+
(
|
| 88 |
+
QuarterBegin(startingMonth=2),
|
| 89 |
+
{
|
| 90 |
+
datetime(2008, 1, 1): datetime(2008, 2, 1),
|
| 91 |
+
datetime(2008, 1, 31): datetime(2008, 2, 1),
|
| 92 |
+
datetime(2008, 1, 15): datetime(2008, 2, 1),
|
| 93 |
+
datetime(2008, 2, 29): datetime(2008, 5, 1),
|
| 94 |
+
datetime(2008, 3, 15): datetime(2008, 5, 1),
|
| 95 |
+
datetime(2008, 3, 31): datetime(2008, 5, 1),
|
| 96 |
+
datetime(2008, 4, 15): datetime(2008, 5, 1),
|
| 97 |
+
datetime(2008, 4, 30): datetime(2008, 5, 1),
|
| 98 |
+
},
|
| 99 |
+
)
|
| 100 |
+
)
|
| 101 |
+
|
| 102 |
+
offset_cases.append(
|
| 103 |
+
(
|
| 104 |
+
QuarterBegin(startingMonth=1, n=0),
|
| 105 |
+
{
|
| 106 |
+
datetime(2008, 1, 1): datetime(2008, 1, 1),
|
| 107 |
+
datetime(2008, 12, 1): datetime(2009, 1, 1),
|
| 108 |
+
datetime(2008, 1, 1): datetime(2008, 1, 1),
|
| 109 |
+
datetime(2008, 2, 15): datetime(2008, 4, 1),
|
| 110 |
+
datetime(2008, 2, 29): datetime(2008, 4, 1),
|
| 111 |
+
datetime(2008, 3, 15): datetime(2008, 4, 1),
|
| 112 |
+
datetime(2008, 3, 31): datetime(2008, 4, 1),
|
| 113 |
+
datetime(2008, 4, 15): datetime(2008, 7, 1),
|
| 114 |
+
datetime(2008, 4, 30): datetime(2008, 7, 1),
|
| 115 |
+
},
|
| 116 |
+
)
|
| 117 |
+
)
|
| 118 |
+
|
| 119 |
+
offset_cases.append(
|
| 120 |
+
(
|
| 121 |
+
QuarterBegin(startingMonth=1, n=-1),
|
| 122 |
+
{
|
| 123 |
+
datetime(2008, 1, 1): datetime(2007, 10, 1),
|
| 124 |
+
datetime(2008, 1, 31): datetime(2008, 1, 1),
|
| 125 |
+
datetime(2008, 2, 15): datetime(2008, 1, 1),
|
| 126 |
+
datetime(2008, 2, 29): datetime(2008, 1, 1),
|
| 127 |
+
datetime(2008, 3, 15): datetime(2008, 1, 1),
|
| 128 |
+
datetime(2008, 3, 31): datetime(2008, 1, 1),
|
| 129 |
+
datetime(2008, 4, 15): datetime(2008, 4, 1),
|
| 130 |
+
datetime(2008, 4, 30): datetime(2008, 4, 1),
|
| 131 |
+
datetime(2008, 7, 1): datetime(2008, 4, 1),
|
| 132 |
+
},
|
| 133 |
+
)
|
| 134 |
+
)
|
| 135 |
+
|
| 136 |
+
offset_cases.append(
|
| 137 |
+
(
|
| 138 |
+
QuarterBegin(startingMonth=1, n=2),
|
| 139 |
+
{
|
| 140 |
+
datetime(2008, 1, 1): datetime(2008, 7, 1),
|
| 141 |
+
datetime(2008, 2, 15): datetime(2008, 7, 1),
|
| 142 |
+
datetime(2008, 2, 29): datetime(2008, 7, 1),
|
| 143 |
+
datetime(2008, 3, 15): datetime(2008, 7, 1),
|
| 144 |
+
datetime(2008, 3, 31): datetime(2008, 7, 1),
|
| 145 |
+
datetime(2008, 4, 15): datetime(2008, 10, 1),
|
| 146 |
+
datetime(2008, 4, 1): datetime(2008, 10, 1),
|
| 147 |
+
},
|
| 148 |
+
)
|
| 149 |
+
)
|
| 150 |
+
|
| 151 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 152 |
+
def test_offset(self, case):
|
| 153 |
+
offset, cases = case
|
| 154 |
+
for base, expected in cases.items():
|
| 155 |
+
assert_offset_equal(offset, base, expected)
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class TestQuarterEnd:
|
| 159 |
+
def test_repr(self):
|
| 160 |
+
expected = "<QuarterEnd: startingMonth=3>"
|
| 161 |
+
assert repr(QuarterEnd()) == expected
|
| 162 |
+
expected = "<QuarterEnd: startingMonth=3>"
|
| 163 |
+
assert repr(QuarterEnd(startingMonth=3)) == expected
|
| 164 |
+
expected = "<QuarterEnd: startingMonth=1>"
|
| 165 |
+
assert repr(QuarterEnd(startingMonth=1)) == expected
|
| 166 |
+
|
| 167 |
+
def test_is_anchored(self):
|
| 168 |
+
msg = "QuarterEnd.is_anchored is deprecated "
|
| 169 |
+
|
| 170 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 171 |
+
assert QuarterEnd(startingMonth=1).is_anchored()
|
| 172 |
+
assert QuarterEnd().is_anchored()
|
| 173 |
+
assert not QuarterEnd(2, startingMonth=1).is_anchored()
|
| 174 |
+
|
| 175 |
+
def test_offset_corner_case(self):
|
| 176 |
+
# corner
|
| 177 |
+
offset = QuarterEnd(n=-1, startingMonth=1)
|
| 178 |
+
assert datetime(2010, 2, 1) + offset == datetime(2010, 1, 31)
|
| 179 |
+
|
| 180 |
+
offset_cases = []
|
| 181 |
+
offset_cases.append(
|
| 182 |
+
(
|
| 183 |
+
QuarterEnd(startingMonth=1),
|
| 184 |
+
{
|
| 185 |
+
datetime(2008, 1, 1): datetime(2008, 1, 31),
|
| 186 |
+
datetime(2008, 1, 31): datetime(2008, 4, 30),
|
| 187 |
+
datetime(2008, 2, 15): datetime(2008, 4, 30),
|
| 188 |
+
datetime(2008, 2, 29): datetime(2008, 4, 30),
|
| 189 |
+
datetime(2008, 3, 15): datetime(2008, 4, 30),
|
| 190 |
+
datetime(2008, 3, 31): datetime(2008, 4, 30),
|
| 191 |
+
datetime(2008, 4, 15): datetime(2008, 4, 30),
|
| 192 |
+
datetime(2008, 4, 30): datetime(2008, 7, 31),
|
| 193 |
+
},
|
| 194 |
+
)
|
| 195 |
+
)
|
| 196 |
+
|
| 197 |
+
offset_cases.append(
|
| 198 |
+
(
|
| 199 |
+
QuarterEnd(startingMonth=2),
|
| 200 |
+
{
|
| 201 |
+
datetime(2008, 1, 1): datetime(2008, 2, 29),
|
| 202 |
+
datetime(2008, 1, 31): datetime(2008, 2, 29),
|
| 203 |
+
datetime(2008, 2, 15): datetime(2008, 2, 29),
|
| 204 |
+
datetime(2008, 2, 29): datetime(2008, 5, 31),
|
| 205 |
+
datetime(2008, 3, 15): datetime(2008, 5, 31),
|
| 206 |
+
datetime(2008, 3, 31): datetime(2008, 5, 31),
|
| 207 |
+
datetime(2008, 4, 15): datetime(2008, 5, 31),
|
| 208 |
+
datetime(2008, 4, 30): datetime(2008, 5, 31),
|
| 209 |
+
},
|
| 210 |
+
)
|
| 211 |
+
)
|
| 212 |
+
|
| 213 |
+
offset_cases.append(
|
| 214 |
+
(
|
| 215 |
+
QuarterEnd(startingMonth=1, n=0),
|
| 216 |
+
{
|
| 217 |
+
datetime(2008, 1, 1): datetime(2008, 1, 31),
|
| 218 |
+
datetime(2008, 1, 31): datetime(2008, 1, 31),
|
| 219 |
+
datetime(2008, 2, 15): datetime(2008, 4, 30),
|
| 220 |
+
datetime(2008, 2, 29): datetime(2008, 4, 30),
|
| 221 |
+
datetime(2008, 3, 15): datetime(2008, 4, 30),
|
| 222 |
+
datetime(2008, 3, 31): datetime(2008, 4, 30),
|
| 223 |
+
datetime(2008, 4, 15): datetime(2008, 4, 30),
|
| 224 |
+
datetime(2008, 4, 30): datetime(2008, 4, 30),
|
| 225 |
+
},
|
| 226 |
+
)
|
| 227 |
+
)
|
| 228 |
+
|
| 229 |
+
offset_cases.append(
|
| 230 |
+
(
|
| 231 |
+
QuarterEnd(startingMonth=1, n=-1),
|
| 232 |
+
{
|
| 233 |
+
datetime(2008, 1, 1): datetime(2007, 10, 31),
|
| 234 |
+
datetime(2008, 1, 31): datetime(2007, 10, 31),
|
| 235 |
+
datetime(2008, 2, 15): datetime(2008, 1, 31),
|
| 236 |
+
datetime(2008, 2, 29): datetime(2008, 1, 31),
|
| 237 |
+
datetime(2008, 3, 15): datetime(2008, 1, 31),
|
| 238 |
+
datetime(2008, 3, 31): datetime(2008, 1, 31),
|
| 239 |
+
datetime(2008, 4, 15): datetime(2008, 1, 31),
|
| 240 |
+
datetime(2008, 4, 30): datetime(2008, 1, 31),
|
| 241 |
+
datetime(2008, 7, 1): datetime(2008, 4, 30),
|
| 242 |
+
},
|
| 243 |
+
)
|
| 244 |
+
)
|
| 245 |
+
|
| 246 |
+
offset_cases.append(
|
| 247 |
+
(
|
| 248 |
+
QuarterEnd(startingMonth=1, n=2),
|
| 249 |
+
{
|
| 250 |
+
datetime(2008, 1, 31): datetime(2008, 7, 31),
|
| 251 |
+
datetime(2008, 2, 15): datetime(2008, 7, 31),
|
| 252 |
+
datetime(2008, 2, 29): datetime(2008, 7, 31),
|
| 253 |
+
datetime(2008, 3, 15): datetime(2008, 7, 31),
|
| 254 |
+
datetime(2008, 3, 31): datetime(2008, 7, 31),
|
| 255 |
+
datetime(2008, 4, 15): datetime(2008, 7, 31),
|
| 256 |
+
datetime(2008, 4, 30): datetime(2008, 10, 31),
|
| 257 |
+
},
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
|
| 261 |
+
@pytest.mark.parametrize("case", offset_cases)
|
| 262 |
+
def test_offset(self, case):
|
| 263 |
+
offset, cases = case
|
| 264 |
+
for base, expected in cases.items():
|
| 265 |
+
assert_offset_equal(offset, base, expected)
|
| 266 |
+
|
| 267 |
+
on_offset_cases = [
|
| 268 |
+
(QuarterEnd(1, startingMonth=1), datetime(2008, 1, 31), True),
|
| 269 |
+
(QuarterEnd(1, startingMonth=1), datetime(2007, 12, 31), False),
|
| 270 |
+
(QuarterEnd(1, startingMonth=1), datetime(2008, 2, 29), False),
|
| 271 |
+
(QuarterEnd(1, startingMonth=1), datetime(2007, 3, 30), False),
|
| 272 |
+
(QuarterEnd(1, startingMonth=1), datetime(2007, 3, 31), False),
|
| 273 |
+
(QuarterEnd(1, startingMonth=1), datetime(2008, 4, 30), True),
|
| 274 |
+
(QuarterEnd(1, startingMonth=1), datetime(2008, 5, 30), False),
|
| 275 |
+
(QuarterEnd(1, startingMonth=1), datetime(2008, 5, 31), False),
|
| 276 |
+
(QuarterEnd(1, startingMonth=1), datetime(2007, 6, 29), False),
|
| 277 |
+
(QuarterEnd(1, startingMonth=1), datetime(2007, 6, 30), False),
|
| 278 |
+
(QuarterEnd(1, startingMonth=2), datetime(2008, 1, 31), False),
|
| 279 |
+
(QuarterEnd(1, startingMonth=2), datetime(2007, 12, 31), False),
|
| 280 |
+
(QuarterEnd(1, startingMonth=2), datetime(2008, 2, 29), True),
|
| 281 |
+
(QuarterEnd(1, startingMonth=2), datetime(2007, 3, 30), False),
|
| 282 |
+
(QuarterEnd(1, startingMonth=2), datetime(2007, 3, 31), False),
|
| 283 |
+
(QuarterEnd(1, startingMonth=2), datetime(2008, 4, 30), False),
|
| 284 |
+
(QuarterEnd(1, startingMonth=2), datetime(2008, 5, 30), False),
|
| 285 |
+
(QuarterEnd(1, startingMonth=2), datetime(2008, 5, 31), True),
|
| 286 |
+
(QuarterEnd(1, startingMonth=2), datetime(2007, 6, 29), False),
|
| 287 |
+
(QuarterEnd(1, startingMonth=2), datetime(2007, 6, 30), False),
|
| 288 |
+
(QuarterEnd(1, startingMonth=3), datetime(2008, 1, 31), False),
|
| 289 |
+
(QuarterEnd(1, startingMonth=3), datetime(2007, 12, 31), True),
|
| 290 |
+
(QuarterEnd(1, startingMonth=3), datetime(2008, 2, 29), False),
|
| 291 |
+
(QuarterEnd(1, startingMonth=3), datetime(2007, 3, 30), False),
|
| 292 |
+
(QuarterEnd(1, startingMonth=3), datetime(2007, 3, 31), True),
|
| 293 |
+
(QuarterEnd(1, startingMonth=3), datetime(2008, 4, 30), False),
|
| 294 |
+
(QuarterEnd(1, startingMonth=3), datetime(2008, 5, 30), False),
|
| 295 |
+
(QuarterEnd(1, startingMonth=3), datetime(2008, 5, 31), False),
|
| 296 |
+
(QuarterEnd(1, startingMonth=3), datetime(2007, 6, 29), False),
|
| 297 |
+
(QuarterEnd(1, startingMonth=3), datetime(2007, 6, 30), True),
|
| 298 |
+
]
|
| 299 |
+
|
| 300 |
+
@pytest.mark.parametrize("case", on_offset_cases)
|
| 301 |
+
def test_is_on_offset(self, case):
|
| 302 |
+
offset, dt, expected = case
|
| 303 |
+
assert_is_on_offset(offset, dt, expected)
|
llava_next/lib/python3.10/site-packages/pandas/tests/tseries/offsets/test_ticks.py
ADDED
|
@@ -0,0 +1,405 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for offsets.Tick and subclasses
|
| 3 |
+
"""
|
| 4 |
+
from datetime import (
|
| 5 |
+
datetime,
|
| 6 |
+
timedelta,
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
from hypothesis import (
|
| 10 |
+
assume,
|
| 11 |
+
example,
|
| 12 |
+
given,
|
| 13 |
+
)
|
| 14 |
+
import numpy as np
|
| 15 |
+
import pytest
|
| 16 |
+
|
| 17 |
+
from pandas._libs.tslibs.offsets import delta_to_tick
|
| 18 |
+
from pandas.errors import OutOfBoundsTimedelta
|
| 19 |
+
|
| 20 |
+
from pandas import (
|
| 21 |
+
Timedelta,
|
| 22 |
+
Timestamp,
|
| 23 |
+
)
|
| 24 |
+
import pandas._testing as tm
|
| 25 |
+
from pandas._testing._hypothesis import INT_NEG_999_TO_POS_999
|
| 26 |
+
from pandas.tests.tseries.offsets.common import assert_offset_equal
|
| 27 |
+
|
| 28 |
+
from pandas.tseries import offsets
|
| 29 |
+
from pandas.tseries.offsets import (
|
| 30 |
+
Hour,
|
| 31 |
+
Micro,
|
| 32 |
+
Milli,
|
| 33 |
+
Minute,
|
| 34 |
+
Nano,
|
| 35 |
+
Second,
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
# ---------------------------------------------------------------------
|
| 39 |
+
# Test Helpers
|
| 40 |
+
|
| 41 |
+
tick_classes = [Hour, Minute, Second, Milli, Micro, Nano]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# ---------------------------------------------------------------------
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def test_apply_ticks():
|
| 48 |
+
result = offsets.Hour(3) + offsets.Hour(4)
|
| 49 |
+
exp = offsets.Hour(7)
|
| 50 |
+
assert result == exp
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def test_delta_to_tick():
|
| 54 |
+
delta = timedelta(3)
|
| 55 |
+
|
| 56 |
+
tick = delta_to_tick(delta)
|
| 57 |
+
assert tick == offsets.Day(3)
|
| 58 |
+
|
| 59 |
+
td = Timedelta(nanoseconds=5)
|
| 60 |
+
tick = delta_to_tick(td)
|
| 61 |
+
assert tick == Nano(5)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
@pytest.mark.parametrize("cls", tick_classes)
|
| 65 |
+
@example(n=2, m=3)
|
| 66 |
+
@example(n=800, m=300)
|
| 67 |
+
@example(n=1000, m=5)
|
| 68 |
+
@given(n=INT_NEG_999_TO_POS_999, m=INT_NEG_999_TO_POS_999)
|
| 69 |
+
def test_tick_add_sub(cls, n, m):
|
| 70 |
+
# For all Tick subclasses and all integers n, m, we should have
|
| 71 |
+
# tick(n) + tick(m) == tick(n+m)
|
| 72 |
+
# tick(n) - tick(m) == tick(n-m)
|
| 73 |
+
left = cls(n)
|
| 74 |
+
right = cls(m)
|
| 75 |
+
expected = cls(n + m)
|
| 76 |
+
|
| 77 |
+
assert left + right == expected
|
| 78 |
+
|
| 79 |
+
expected = cls(n - m)
|
| 80 |
+
assert left - right == expected
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@pytest.mark.arm_slow
|
| 84 |
+
@pytest.mark.parametrize("cls", tick_classes)
|
| 85 |
+
@example(n=2, m=3)
|
| 86 |
+
@given(n=INT_NEG_999_TO_POS_999, m=INT_NEG_999_TO_POS_999)
|
| 87 |
+
def test_tick_equality(cls, n, m):
|
| 88 |
+
assume(m != n)
|
| 89 |
+
# tick == tock iff tick.n == tock.n
|
| 90 |
+
left = cls(n)
|
| 91 |
+
right = cls(m)
|
| 92 |
+
assert left != right
|
| 93 |
+
|
| 94 |
+
right = cls(n)
|
| 95 |
+
assert left == right
|
| 96 |
+
assert not left != right
|
| 97 |
+
|
| 98 |
+
if n != 0:
|
| 99 |
+
assert cls(n) != cls(-n)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
# ---------------------------------------------------------------------
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def test_Hour():
|
| 106 |
+
assert_offset_equal(Hour(), datetime(2010, 1, 1), datetime(2010, 1, 1, 1))
|
| 107 |
+
assert_offset_equal(Hour(-1), datetime(2010, 1, 1, 1), datetime(2010, 1, 1))
|
| 108 |
+
assert_offset_equal(2 * Hour(), datetime(2010, 1, 1), datetime(2010, 1, 1, 2))
|
| 109 |
+
assert_offset_equal(-1 * Hour(), datetime(2010, 1, 1, 1), datetime(2010, 1, 1))
|
| 110 |
+
|
| 111 |
+
assert Hour(3) + Hour(2) == Hour(5)
|
| 112 |
+
assert Hour(3) - Hour(2) == Hour()
|
| 113 |
+
|
| 114 |
+
assert Hour(4) != Hour(1)
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def test_Minute():
|
| 118 |
+
assert_offset_equal(Minute(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 1))
|
| 119 |
+
assert_offset_equal(Minute(-1), datetime(2010, 1, 1, 0, 1), datetime(2010, 1, 1))
|
| 120 |
+
assert_offset_equal(2 * Minute(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 2))
|
| 121 |
+
assert_offset_equal(-1 * Minute(), datetime(2010, 1, 1, 0, 1), datetime(2010, 1, 1))
|
| 122 |
+
|
| 123 |
+
assert Minute(3) + Minute(2) == Minute(5)
|
| 124 |
+
assert Minute(3) - Minute(2) == Minute()
|
| 125 |
+
assert Minute(5) != Minute()
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def test_Second():
    """Second offset: application to datetimes, scaling, and Second arithmetic."""
    assert_offset_equal(Second(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 0, 1))
    assert_offset_equal(Second(-1), datetime(2010, 1, 1, 0, 0, 1), datetime(2010, 1, 1))
    assert_offset_equal(
        2 * Second(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 0, 2)
    )
    assert_offset_equal(
        -1 * Second(), datetime(2010, 1, 1, 0, 0, 1), datetime(2010, 1, 1)
    )

    # adding/subtracting Seconds combines their counts
    assert Second(3) + Second(2) == Second(5)
    assert Second(3) - Second(2) == Second()
def test_Millisecond():
    """Milli offset: application to datetimes (via microseconds), scaling, arithmetic."""
    # 1 millisecond == 1000 microseconds on a datetime
    assert_offset_equal(
        Milli(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 0, 0, 1000)
    )
    assert_offset_equal(
        Milli(-1), datetime(2010, 1, 1, 0, 0, 0, 1000), datetime(2010, 1, 1)
    )
    assert_offset_equal(
        Milli(2), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 0, 0, 2000)
    )
    assert_offset_equal(
        2 * Milli(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 0, 0, 2000)
    )
    assert_offset_equal(
        -1 * Milli(), datetime(2010, 1, 1, 0, 0, 0, 1000), datetime(2010, 1, 1)
    )

    # adding/subtracting Millis combines their counts
    assert Milli(3) + Milli(2) == Milli(5)
    assert Milli(3) - Milli(2) == Milli()
def test_MillisecondTimestampArithmetic():
    """Milli offset applied to Timestamp shifts by exactly one millisecond."""
    assert_offset_equal(
        Milli(), Timestamp("2010-01-01"), Timestamp("2010-01-01 00:00:00.001")
    )
    assert_offset_equal(
        Milli(-1), Timestamp("2010-01-01 00:00:00.001"), Timestamp("2010-01-01")
    )
def test_Microsecond():
    """Micro offset: application to datetimes, scaling, and Micro arithmetic."""
    assert_offset_equal(Micro(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 0, 0, 1))
    assert_offset_equal(
        Micro(-1), datetime(2010, 1, 1, 0, 0, 0, 1), datetime(2010, 1, 1)
    )

    assert_offset_equal(
        2 * Micro(), datetime(2010, 1, 1), datetime(2010, 1, 1, 0, 0, 0, 2)
    )
    assert_offset_equal(
        -1 * Micro(), datetime(2010, 1, 1, 0, 0, 0, 1), datetime(2010, 1, 1)
    )

    # adding/subtracting Micros combines their counts
    assert Micro(3) + Micro(2) == Micro(5)
    assert Micro(3) - Micro(2) == Micro()
def test_NanosecondGeneric():
    """Nano offset adds nanoseconds to a Timestamp from either operand side."""
    timestamp = Timestamp(datetime(2010, 1, 1))
    assert timestamp.nanosecond == 0

    result = timestamp + Nano(10)
    assert result.nanosecond == 10

    # addition is commutative: offset + timestamp works too
    reverse_result = Nano(10) + timestamp
    assert reverse_result.nanosecond == 10
def test_Nanosecond():
    """Nano offset: application to Timestamps, scaling, and cross-tick arithmetic."""
    timestamp = Timestamp(datetime(2010, 1, 1))
    assert_offset_equal(Nano(), timestamp, timestamp + np.timedelta64(1, "ns"))
    assert_offset_equal(Nano(-1), timestamp + np.timedelta64(1, "ns"), timestamp)
    assert_offset_equal(2 * Nano(), timestamp, timestamp + np.timedelta64(2, "ns"))
    assert_offset_equal(-1 * Nano(), timestamp + np.timedelta64(1, "ns"), timestamp)

    assert Nano(3) + Nano(2) == Nano(5)
    assert Nano(3) - Nano(2) == Nano()

    # GH9284: mixed-class tick addition collapses to the finer unit
    assert Nano(1) + Nano(10) == Nano(11)
    assert Nano(5) + Micro(1) == Nano(1005)
    assert Micro(5) + Nano(1) == Nano(5001)
@pytest.mark.parametrize(
    "kls, expected",
    [
        (Hour, Timedelta(hours=5)),
        (Minute, Timedelta(hours=2, minutes=3)),
        (Second, Timedelta(hours=2, seconds=3)),
        (Milli, Timedelta(hours=2, milliseconds=3)),
        (Micro, Timedelta(hours=2, microseconds=3)),
        (Nano, Timedelta(hours=2, nanoseconds=3)),
    ],
)
def test_tick_addition(kls, expected):
    """Tick + timedelta-like (and the reverse) returns a Timedelta with the sum."""
    offset = kls(3)
    td = Timedelta(hours=2)

    # exercise pandas Timedelta, datetime.timedelta, and np.timedelta64 operands
    for other in [td, td.to_pytimedelta(), td.to_timedelta64()]:
        result = offset + other
        assert isinstance(result, Timedelta)
        assert result == expected

        result = other + offset
        assert isinstance(result, Timedelta)
        assert result == expected
def test_tick_delta_overflow():
    """GH#55503: an overflowing ``.delta`` raises OutOfBoundsTimedelta, not OverflowError."""
    tick = offsets.Day(10**9)
    msg = "Cannot cast 1000000000 days 00:00:00 to unit='ns' without overflow"
    depr_msg = "Day.delta is deprecated"
    # .delta is deprecated, so the overflow error arrives under a FutureWarning
    with pytest.raises(OutOfBoundsTimedelta, match=msg):
        with tm.assert_produces_warning(FutureWarning, match=depr_msg):
            tick.delta
@pytest.mark.parametrize("cls", tick_classes)
def test_tick_division(cls):
    """Dividing a tick by ticks, numbers, or timedeltas behaves like its Timedelta."""
    off = cls(10)

    assert off / cls(5) == 2
    assert off / 2 == cls(5)
    assert off / 2.0 == cls(5)

    # dividing by the equivalent timedelta gives exactly 1
    assert off / off._as_pd_timedelta == 1
    assert off / off._as_pd_timedelta.to_timedelta64() == 1

    assert off / Nano(1) == off._as_pd_timedelta / Nano(1)._as_pd_timedelta

    if cls is not Nano:
        # A case where we end up with a smaller class
        result = off / 1000
        assert isinstance(result, offsets.Tick)
        assert not isinstance(result, cls)
        assert result._as_pd_timedelta == off._as_pd_timedelta / 1000

    if cls._nanos_inc < Timedelta(seconds=1)._value:
        # Case where we end up with a bigger class
        result = off / 0.001
        assert isinstance(result, offsets.Tick)
        assert not isinstance(result, cls)
        assert result._as_pd_timedelta == off._as_pd_timedelta / 0.001
def test_tick_mul_float():
    """Multiplying a tick by a float keeps the class when exact, else refines it."""
    off = Micro(2)

    # Case where we retain type: 2us * 1.5 is a whole number of microseconds
    result = off * 1.5
    expected = Micro(3)
    assert result == expected
    assert isinstance(result, Micro)

    # Case where we bump up to the next type: 2.5us must be expressed in nanoseconds
    result = off * 1.25
    expected = Nano(2500)
    assert result == expected
    assert isinstance(result, Nano)
@pytest.mark.parametrize("cls", tick_classes)
def test_tick_rdiv(cls):
    """Right-division: numbers / tick raises; timedelta-likes / tick divide through."""
    off = cls(10)
    delta = off._as_pd_timedelta
    td64 = delta.to_timedelta64()
    # local renamed from instance__type for consistency with test_compare_ticks_to_strs
    instance_type = ".".join([cls.__module__, cls.__name__])
    msg = (
        "unsupported operand type\\(s\\) for \\/: 'int'|'float' and "
        f"'{instance_type}'"
    )

    # plain numbers cannot be divided by an offset
    with pytest.raises(TypeError, match=msg):
        2 / off
    with pytest.raises(TypeError, match=msg):
        2.0 / off

    assert (td64 * 2.5) / off == 2.5

    if cls is not Nano:
        # skip pytimedelta for Nano since it gets dropped
        assert (delta.to_pytimedelta() * 2) / off == 2

    # arrays of timedelta64 divide elementwise
    result = np.array([2 * td64, td64]) / off
    expected = np.array([2.0, 1.0])
    tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("cls1", tick_classes)
@pytest.mark.parametrize("cls2", tick_classes)
def test_tick_zero(cls1, cls2):
    """Zero-count ticks of any classes compare equal and act as additive identity."""
    assert cls1(0) == cls2(0)
    assert cls1(0) + cls2(0) == cls1(0)

    if cls1 is not Nano:
        assert cls1(2) + cls2(0) == cls1(2)

    if cls1 is Nano:
        # for Nano, only pin the same-class zero case
        assert cls1(2) + Nano(0) == cls1(2)
@pytest.mark.parametrize("cls", tick_classes)
def test_tick_equalities(cls):
    """A default-constructed tick equals the explicit count-1 tick."""
    assert cls() == cls(1)
@pytest.mark.parametrize("cls", tick_classes)
def test_tick_offset(cls):
    """Ticks are not anchored; ``is_anchored`` is deprecated and warns."""
    msg = f"{cls.__name__}.is_anchored is deprecated "

    with tm.assert_produces_warning(FutureWarning, match=msg):
        assert not cls().is_anchored()
@pytest.mark.parametrize("cls", tick_classes)
def test_compare_ticks(cls):
    """Ordering comparisons between same-class ticks follow their counts."""
    three = cls(3)
    four = cls(4)

    assert three < cls(4)
    assert cls(3) < four
    assert four > cls(3)
    assert cls(4) > three
    assert cls(3) == cls(3)
    assert cls(3) != cls(4)
@pytest.mark.parametrize("cls", tick_classes)
def test_compare_ticks_to_strs(cls):
    """GH#23524: equality with strings is False; ordering with strings raises."""
    off = cls(19)

    # These tests should work with any strings, but we particularly are
    # interested in "infer" as that comparison is convenient to make in
    # Datetime/Timedelta Array/Index constructors
    assert not off == "infer"
    assert not "foo" == off

    instance_type = ".".join([cls.__module__, cls.__name__])
    msg = (
        "'<'|'<='|'>'|'>=' not supported between instances of "
        f"'str' and '{instance_type}'|'{instance_type}' and 'str'"
    )

    # ordering must raise regardless of operand order
    for left, right in [("infer", off), (off, "infer")]:
        with pytest.raises(TypeError, match=msg):
            left < right
        with pytest.raises(TypeError, match=msg):
            left <= right
        with pytest.raises(TypeError, match=msg):
            left > right
        with pytest.raises(TypeError, match=msg):
            left >= right
@pytest.mark.parametrize("cls", tick_classes)
def test_compare_ticks_to_timedeltalike(cls):
    """A tick compares equal (and totally ordered) against its timedelta equivalents."""
    off = cls(19)

    td = off._as_pd_timedelta

    others = [td, td.to_timedelta64()]
    if cls is not Nano:
        # datetime.timedelta cannot represent nanoseconds, so skip it for Nano
        others.append(td.to_pytimedelta())

    for other in others:
        assert off == other
        assert not off != other
        assert not off < other
        assert not off > other
        assert off <= other
        assert off >= other