Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_aggregation.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_common.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_expressions.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_optional_dependency.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_register_accessor.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_take.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/api/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/api/__pycache__/test_types.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/api/test_types.py +62 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/common.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_numba.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_str.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/common.py +7 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply.py +1733 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply_relabeling.py +113 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_frame_transform.py +264 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_invalid_arg.py +361 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_numba.py +118 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_series_apply_relabeling.py +39 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_series_transform.py +84 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_str.py +326 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__init__.py +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/common.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_categorical.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_datetime64.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_interval.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_numeric.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_object.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_period.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_timedelta64.cpython-310.pyc +0 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/common.py +155 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/conftest.py +139 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_array_ops.py +39 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_categorical.py +25 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_datetime64.py +2469 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_interval.py +306 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_numeric.py +1567 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_object.py +420 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_period.py +1675 -0
- omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_timedelta64.py +2179 -0
omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (165 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_aggregation.cpython-310.pyc
ADDED
|
Binary file (3.07 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_common.cpython-310.pyc
ADDED
|
Binary file (7.94 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_expressions.cpython-310.pyc
ADDED
|
Binary file (11.8 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_optional_dependency.cpython-310.pyc
ADDED
|
Binary file (2.7 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_register_accessor.cpython-310.pyc
ADDED
|
Binary file (4.22 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/__pycache__/test_take.cpython-310.pyc
ADDED
|
Binary file (9.59 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/api/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/api/__pycache__/test_types.cpython-310.pyc
ADDED
|
Binary file (1.88 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/api/test_types.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import pandas._testing as tm
|
| 4 |
+
from pandas.api import types
|
| 5 |
+
from pandas.tests.api.test_api import Base
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestTypes(Base):
|
| 9 |
+
allowed = [
|
| 10 |
+
"is_any_real_numeric_dtype",
|
| 11 |
+
"is_bool",
|
| 12 |
+
"is_bool_dtype",
|
| 13 |
+
"is_categorical_dtype",
|
| 14 |
+
"is_complex",
|
| 15 |
+
"is_complex_dtype",
|
| 16 |
+
"is_datetime64_any_dtype",
|
| 17 |
+
"is_datetime64_dtype",
|
| 18 |
+
"is_datetime64_ns_dtype",
|
| 19 |
+
"is_datetime64tz_dtype",
|
| 20 |
+
"is_dtype_equal",
|
| 21 |
+
"is_float",
|
| 22 |
+
"is_float_dtype",
|
| 23 |
+
"is_int64_dtype",
|
| 24 |
+
"is_integer",
|
| 25 |
+
"is_integer_dtype",
|
| 26 |
+
"is_number",
|
| 27 |
+
"is_numeric_dtype",
|
| 28 |
+
"is_object_dtype",
|
| 29 |
+
"is_scalar",
|
| 30 |
+
"is_sparse",
|
| 31 |
+
"is_string_dtype",
|
| 32 |
+
"is_signed_integer_dtype",
|
| 33 |
+
"is_timedelta64_dtype",
|
| 34 |
+
"is_timedelta64_ns_dtype",
|
| 35 |
+
"is_unsigned_integer_dtype",
|
| 36 |
+
"is_period_dtype",
|
| 37 |
+
"is_interval",
|
| 38 |
+
"is_interval_dtype",
|
| 39 |
+
"is_re",
|
| 40 |
+
"is_re_compilable",
|
| 41 |
+
"is_dict_like",
|
| 42 |
+
"is_iterator",
|
| 43 |
+
"is_file_like",
|
| 44 |
+
"is_list_like",
|
| 45 |
+
"is_hashable",
|
| 46 |
+
"is_array_like",
|
| 47 |
+
"is_named_tuple",
|
| 48 |
+
"pandas_dtype",
|
| 49 |
+
"union_categoricals",
|
| 50 |
+
"infer_dtype",
|
| 51 |
+
"is_extension_array_dtype",
|
| 52 |
+
]
|
| 53 |
+
deprecated: list[str] = []
|
| 54 |
+
dtypes = ["CategoricalDtype", "DatetimeTZDtype", "PeriodDtype", "IntervalDtype"]
|
| 55 |
+
|
| 56 |
+
def test_types(self):
|
| 57 |
+
self.check(types, self.allowed + self.dtypes + self.deprecated)
|
| 58 |
+
|
| 59 |
+
def test_deprecated_from_api_types(self):
|
| 60 |
+
for t in self.deprecated:
|
| 61 |
+
with tm.assert_produces_warning(FutureWarning):
|
| 62 |
+
getattr(types, t)(1)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (171 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/common.cpython-310.pyc
ADDED
|
Binary file (544 Bytes). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply.cpython-310.pyc
ADDED
|
Binary file (57.2 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc
ADDED
|
Binary file (3.28 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc
ADDED
|
Binary file (8.02 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc
ADDED
|
Binary file (11.8 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_numba.cpython-310.pyc
ADDED
|
Binary file (5.29 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc
ADDED
|
Binary file (23.7 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc
ADDED
|
Binary file (1.43 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc
ADDED
|
Binary file (3.22 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_str.cpython-310.pyc
ADDED
|
Binary file (7.23 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/common.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pandas.core.groupby.base import transformation_kernels
|
| 2 |
+
|
| 3 |
+
# There is no Series.cumcount or DataFrame.cumcount
|
| 4 |
+
series_transform_kernels = [
|
| 5 |
+
x for x in sorted(transformation_kernels) if x != "cumcount"
|
| 6 |
+
]
|
| 7 |
+
frame_transform_kernels = [x for x in sorted(transformation_kernels) if x != "cumcount"]
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply.py
ADDED
|
@@ -0,0 +1,1733 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
import warnings
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
from pandas.core.dtypes.dtypes import CategoricalDtype
|
| 8 |
+
|
| 9 |
+
import pandas as pd
|
| 10 |
+
from pandas import (
|
| 11 |
+
DataFrame,
|
| 12 |
+
MultiIndex,
|
| 13 |
+
Series,
|
| 14 |
+
Timestamp,
|
| 15 |
+
date_range,
|
| 16 |
+
)
|
| 17 |
+
import pandas._testing as tm
|
| 18 |
+
from pandas.tests.frame.common import zip_frames
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
@pytest.fixture
|
| 22 |
+
def int_frame_const_col():
|
| 23 |
+
"""
|
| 24 |
+
Fixture for DataFrame of ints which are constant per column
|
| 25 |
+
|
| 26 |
+
Columns are ['A', 'B', 'C'], with values (per column): [1, 2, 3]
|
| 27 |
+
"""
|
| 28 |
+
df = DataFrame(
|
| 29 |
+
np.tile(np.arange(3, dtype="int64"), 6).reshape(6, -1) + 1,
|
| 30 |
+
columns=["A", "B", "C"],
|
| 31 |
+
)
|
| 32 |
+
return df
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
@pytest.fixture(params=["python", pytest.param("numba", marks=pytest.mark.single_cpu)])
|
| 36 |
+
def engine(request):
|
| 37 |
+
if request.param == "numba":
|
| 38 |
+
pytest.importorskip("numba")
|
| 39 |
+
return request.param
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def test_apply(float_frame, engine, request):
|
| 43 |
+
if engine == "numba":
|
| 44 |
+
mark = pytest.mark.xfail(reason="numba engine not supporting numpy ufunc yet")
|
| 45 |
+
request.node.add_marker(mark)
|
| 46 |
+
with np.errstate(all="ignore"):
|
| 47 |
+
# ufunc
|
| 48 |
+
result = np.sqrt(float_frame["A"])
|
| 49 |
+
expected = float_frame.apply(np.sqrt, engine=engine)["A"]
|
| 50 |
+
tm.assert_series_equal(result, expected)
|
| 51 |
+
|
| 52 |
+
# aggregator
|
| 53 |
+
result = float_frame.apply(np.mean, engine=engine)["A"]
|
| 54 |
+
expected = np.mean(float_frame["A"])
|
| 55 |
+
assert result == expected
|
| 56 |
+
|
| 57 |
+
d = float_frame.index[0]
|
| 58 |
+
result = float_frame.apply(np.mean, axis=1, engine=engine)
|
| 59 |
+
expected = np.mean(float_frame.xs(d))
|
| 60 |
+
assert result[d] == expected
|
| 61 |
+
assert result.index is float_frame.index
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
@pytest.mark.parametrize("axis", [0, 1])
|
| 65 |
+
@pytest.mark.parametrize("raw", [True, False])
|
| 66 |
+
def test_apply_args(float_frame, axis, raw, engine, request):
|
| 67 |
+
if engine == "numba":
|
| 68 |
+
mark = pytest.mark.xfail(reason="numba engine doesn't support args")
|
| 69 |
+
request.node.add_marker(mark)
|
| 70 |
+
result = float_frame.apply(
|
| 71 |
+
lambda x, y: x + y, axis, args=(1,), raw=raw, engine=engine
|
| 72 |
+
)
|
| 73 |
+
expected = float_frame + 1
|
| 74 |
+
tm.assert_frame_equal(result, expected)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def test_apply_categorical_func():
|
| 78 |
+
# GH 9573
|
| 79 |
+
df = DataFrame({"c0": ["A", "A", "B", "B"], "c1": ["C", "C", "D", "D"]})
|
| 80 |
+
result = df.apply(lambda ts: ts.astype("category"))
|
| 81 |
+
|
| 82 |
+
assert result.shape == (4, 2)
|
| 83 |
+
assert isinstance(result["c0"].dtype, CategoricalDtype)
|
| 84 |
+
assert isinstance(result["c1"].dtype, CategoricalDtype)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def test_apply_axis1_with_ea():
|
| 88 |
+
# GH#36785
|
| 89 |
+
expected = DataFrame({"A": [Timestamp("2013-01-01", tz="UTC")]})
|
| 90 |
+
result = expected.apply(lambda x: x, axis=1)
|
| 91 |
+
tm.assert_frame_equal(result, expected)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
@pytest.mark.parametrize(
|
| 95 |
+
"data, dtype",
|
| 96 |
+
[(1, None), (1, CategoricalDtype([1])), (Timestamp("2013-01-01", tz="UTC"), None)],
|
| 97 |
+
)
|
| 98 |
+
def test_agg_axis1_duplicate_index(data, dtype):
|
| 99 |
+
# GH 42380
|
| 100 |
+
expected = DataFrame([[data], [data]], index=["a", "a"], dtype=dtype)
|
| 101 |
+
result = expected.agg(lambda x: x, axis=1)
|
| 102 |
+
tm.assert_frame_equal(result, expected)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def test_apply_mixed_datetimelike():
|
| 106 |
+
# mixed datetimelike
|
| 107 |
+
# GH 7778
|
| 108 |
+
expected = DataFrame(
|
| 109 |
+
{
|
| 110 |
+
"A": date_range("20130101", periods=3),
|
| 111 |
+
"B": pd.to_timedelta(np.arange(3), unit="s"),
|
| 112 |
+
}
|
| 113 |
+
)
|
| 114 |
+
result = expected.apply(lambda x: x, axis=1)
|
| 115 |
+
tm.assert_frame_equal(result, expected)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@pytest.mark.parametrize("func", [np.sqrt, np.mean])
|
| 119 |
+
def test_apply_empty(func, engine):
|
| 120 |
+
# empty
|
| 121 |
+
empty_frame = DataFrame()
|
| 122 |
+
|
| 123 |
+
result = empty_frame.apply(func, engine=engine)
|
| 124 |
+
assert result.empty
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def test_apply_float_frame(float_frame, engine):
|
| 128 |
+
no_rows = float_frame[:0]
|
| 129 |
+
result = no_rows.apply(lambda x: x.mean(), engine=engine)
|
| 130 |
+
expected = Series(np.nan, index=float_frame.columns)
|
| 131 |
+
tm.assert_series_equal(result, expected)
|
| 132 |
+
|
| 133 |
+
no_cols = float_frame.loc[:, []]
|
| 134 |
+
result = no_cols.apply(lambda x: x.mean(), axis=1, engine=engine)
|
| 135 |
+
expected = Series(np.nan, index=float_frame.index)
|
| 136 |
+
tm.assert_series_equal(result, expected)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def test_apply_empty_except_index(engine):
|
| 140 |
+
# GH 2476
|
| 141 |
+
expected = DataFrame(index=["a"])
|
| 142 |
+
result = expected.apply(lambda x: x["a"], axis=1, engine=engine)
|
| 143 |
+
tm.assert_frame_equal(result, expected)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def test_apply_with_reduce_empty():
|
| 147 |
+
# reduce with an empty DataFrame
|
| 148 |
+
empty_frame = DataFrame()
|
| 149 |
+
|
| 150 |
+
x = []
|
| 151 |
+
result = empty_frame.apply(x.append, axis=1, result_type="expand")
|
| 152 |
+
tm.assert_frame_equal(result, empty_frame)
|
| 153 |
+
result = empty_frame.apply(x.append, axis=1, result_type="reduce")
|
| 154 |
+
expected = Series([], dtype=np.float64)
|
| 155 |
+
tm.assert_series_equal(result, expected)
|
| 156 |
+
|
| 157 |
+
empty_with_cols = DataFrame(columns=["a", "b", "c"])
|
| 158 |
+
result = empty_with_cols.apply(x.append, axis=1, result_type="expand")
|
| 159 |
+
tm.assert_frame_equal(result, empty_with_cols)
|
| 160 |
+
result = empty_with_cols.apply(x.append, axis=1, result_type="reduce")
|
| 161 |
+
expected = Series([], dtype=np.float64)
|
| 162 |
+
tm.assert_series_equal(result, expected)
|
| 163 |
+
|
| 164 |
+
# Ensure that x.append hasn't been called
|
| 165 |
+
assert x == []
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
@pytest.mark.parametrize("func", ["sum", "prod", "any", "all"])
|
| 169 |
+
def test_apply_funcs_over_empty(func):
|
| 170 |
+
# GH 28213
|
| 171 |
+
df = DataFrame(columns=["a", "b", "c"])
|
| 172 |
+
|
| 173 |
+
result = df.apply(getattr(np, func))
|
| 174 |
+
expected = getattr(df, func)()
|
| 175 |
+
if func in ("sum", "prod"):
|
| 176 |
+
expected = expected.astype(float)
|
| 177 |
+
tm.assert_series_equal(result, expected)
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def test_nunique_empty():
|
| 181 |
+
# GH 28213
|
| 182 |
+
df = DataFrame(columns=["a", "b", "c"])
|
| 183 |
+
|
| 184 |
+
result = df.nunique()
|
| 185 |
+
expected = Series(0, index=df.columns)
|
| 186 |
+
tm.assert_series_equal(result, expected)
|
| 187 |
+
|
| 188 |
+
result = df.T.nunique()
|
| 189 |
+
expected = Series([], dtype=np.float64)
|
| 190 |
+
tm.assert_series_equal(result, expected)
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def test_apply_standard_nonunique():
|
| 194 |
+
df = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]], index=["a", "a", "c"])
|
| 195 |
+
|
| 196 |
+
result = df.apply(lambda s: s[0], axis=1)
|
| 197 |
+
expected = Series([1, 4, 7], ["a", "a", "c"])
|
| 198 |
+
tm.assert_series_equal(result, expected)
|
| 199 |
+
|
| 200 |
+
result = df.T.apply(lambda s: s[0], axis=0)
|
| 201 |
+
tm.assert_series_equal(result, expected)
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def test_apply_broadcast_scalars(float_frame):
|
| 205 |
+
# scalars
|
| 206 |
+
result = float_frame.apply(np.mean, result_type="broadcast")
|
| 207 |
+
expected = DataFrame([float_frame.mean()], index=float_frame.index)
|
| 208 |
+
tm.assert_frame_equal(result, expected)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def test_apply_broadcast_scalars_axis1(float_frame):
|
| 212 |
+
result = float_frame.apply(np.mean, axis=1, result_type="broadcast")
|
| 213 |
+
m = float_frame.mean(axis=1)
|
| 214 |
+
expected = DataFrame({c: m for c in float_frame.columns})
|
| 215 |
+
tm.assert_frame_equal(result, expected)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def test_apply_broadcast_lists_columns(float_frame):
|
| 219 |
+
# lists
|
| 220 |
+
result = float_frame.apply(
|
| 221 |
+
lambda x: list(range(len(float_frame.columns))),
|
| 222 |
+
axis=1,
|
| 223 |
+
result_type="broadcast",
|
| 224 |
+
)
|
| 225 |
+
m = list(range(len(float_frame.columns)))
|
| 226 |
+
expected = DataFrame(
|
| 227 |
+
[m] * len(float_frame.index),
|
| 228 |
+
dtype="float64",
|
| 229 |
+
index=float_frame.index,
|
| 230 |
+
columns=float_frame.columns,
|
| 231 |
+
)
|
| 232 |
+
tm.assert_frame_equal(result, expected)
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def test_apply_broadcast_lists_index(float_frame):
|
| 236 |
+
result = float_frame.apply(
|
| 237 |
+
lambda x: list(range(len(float_frame.index))), result_type="broadcast"
|
| 238 |
+
)
|
| 239 |
+
m = list(range(len(float_frame.index)))
|
| 240 |
+
expected = DataFrame(
|
| 241 |
+
{c: m for c in float_frame.columns},
|
| 242 |
+
dtype="float64",
|
| 243 |
+
index=float_frame.index,
|
| 244 |
+
)
|
| 245 |
+
tm.assert_frame_equal(result, expected)
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
def test_apply_broadcast_list_lambda_func(int_frame_const_col):
|
| 249 |
+
# preserve columns
|
| 250 |
+
df = int_frame_const_col
|
| 251 |
+
result = df.apply(lambda x: [1, 2, 3], axis=1, result_type="broadcast")
|
| 252 |
+
tm.assert_frame_equal(result, df)
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
def test_apply_broadcast_series_lambda_func(int_frame_const_col):
|
| 256 |
+
df = int_frame_const_col
|
| 257 |
+
result = df.apply(
|
| 258 |
+
lambda x: Series([1, 2, 3], index=list("abc")),
|
| 259 |
+
axis=1,
|
| 260 |
+
result_type="broadcast",
|
| 261 |
+
)
|
| 262 |
+
expected = df.copy()
|
| 263 |
+
tm.assert_frame_equal(result, expected)
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
@pytest.mark.parametrize("axis", [0, 1])
|
| 267 |
+
def test_apply_raw_float_frame(float_frame, axis, engine):
|
| 268 |
+
if engine == "numba":
|
| 269 |
+
pytest.skip("numba can't handle when UDF returns None.")
|
| 270 |
+
|
| 271 |
+
def _assert_raw(x):
|
| 272 |
+
assert isinstance(x, np.ndarray)
|
| 273 |
+
assert x.ndim == 1
|
| 274 |
+
|
| 275 |
+
float_frame.apply(_assert_raw, axis=axis, engine=engine, raw=True)
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
@pytest.mark.parametrize("axis", [0, 1])
|
| 279 |
+
def test_apply_raw_float_frame_lambda(float_frame, axis, engine):
|
| 280 |
+
result = float_frame.apply(np.mean, axis=axis, engine=engine, raw=True)
|
| 281 |
+
expected = float_frame.apply(lambda x: x.values.mean(), axis=axis)
|
| 282 |
+
tm.assert_series_equal(result, expected)
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
def test_apply_raw_float_frame_no_reduction(float_frame, engine):
|
| 286 |
+
# no reduction
|
| 287 |
+
result = float_frame.apply(lambda x: x * 2, engine=engine, raw=True)
|
| 288 |
+
expected = float_frame * 2
|
| 289 |
+
tm.assert_frame_equal(result, expected)
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
@pytest.mark.parametrize("axis", [0, 1])
|
| 293 |
+
def test_apply_raw_mixed_type_frame(axis, engine):
|
| 294 |
+
if engine == "numba":
|
| 295 |
+
pytest.skip("isinstance check doesn't work with numba")
|
| 296 |
+
|
| 297 |
+
def _assert_raw(x):
|
| 298 |
+
assert isinstance(x, np.ndarray)
|
| 299 |
+
assert x.ndim == 1
|
| 300 |
+
|
| 301 |
+
# Mixed dtype (GH-32423)
|
| 302 |
+
df = DataFrame(
|
| 303 |
+
{
|
| 304 |
+
"a": 1.0,
|
| 305 |
+
"b": 2,
|
| 306 |
+
"c": "foo",
|
| 307 |
+
"float32": np.array([1.0] * 10, dtype="float32"),
|
| 308 |
+
"int32": np.array([1] * 10, dtype="int32"),
|
| 309 |
+
},
|
| 310 |
+
index=np.arange(10),
|
| 311 |
+
)
|
| 312 |
+
df.apply(_assert_raw, axis=axis, engine=engine, raw=True)
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
def test_apply_axis1(float_frame):
|
| 316 |
+
d = float_frame.index[0]
|
| 317 |
+
result = float_frame.apply(np.mean, axis=1)[d]
|
| 318 |
+
expected = np.mean(float_frame.xs(d))
|
| 319 |
+
assert result == expected
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
def test_apply_mixed_dtype_corner():
|
| 323 |
+
df = DataFrame({"A": ["foo"], "B": [1.0]})
|
| 324 |
+
result = df[:0].apply(np.mean, axis=1)
|
| 325 |
+
# the result here is actually kind of ambiguous, should it be a Series
|
| 326 |
+
# or a DataFrame?
|
| 327 |
+
expected = Series(np.nan, index=pd.Index([], dtype="int64"))
|
| 328 |
+
tm.assert_series_equal(result, expected)
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
def test_apply_mixed_dtype_corner_indexing():
|
| 332 |
+
df = DataFrame({"A": ["foo"], "B": [1.0]})
|
| 333 |
+
result = df.apply(lambda x: x["A"], axis=1)
|
| 334 |
+
expected = Series(["foo"], index=[0])
|
| 335 |
+
tm.assert_series_equal(result, expected)
|
| 336 |
+
|
| 337 |
+
result = df.apply(lambda x: x["B"], axis=1)
|
| 338 |
+
expected = Series([1.0], index=[0])
|
| 339 |
+
tm.assert_series_equal(result, expected)
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
@pytest.mark.filterwarnings("ignore::RuntimeWarning")
|
| 343 |
+
@pytest.mark.parametrize("ax", ["index", "columns"])
|
| 344 |
+
@pytest.mark.parametrize(
|
| 345 |
+
"func", [lambda x: x, lambda x: x.mean()], ids=["identity", "mean"]
|
| 346 |
+
)
|
| 347 |
+
@pytest.mark.parametrize("raw", [True, False])
|
| 348 |
+
@pytest.mark.parametrize("axis", [0, 1])
|
| 349 |
+
def test_apply_empty_infer_type(ax, func, raw, axis, engine, request):
|
| 350 |
+
df = DataFrame(**{ax: ["a", "b", "c"]})
|
| 351 |
+
|
| 352 |
+
with np.errstate(all="ignore"):
|
| 353 |
+
test_res = func(np.array([], dtype="f8"))
|
| 354 |
+
is_reduction = not isinstance(test_res, np.ndarray)
|
| 355 |
+
|
| 356 |
+
result = df.apply(func, axis=axis, engine=engine, raw=raw)
|
| 357 |
+
if is_reduction:
|
| 358 |
+
agg_axis = df._get_agg_axis(axis)
|
| 359 |
+
assert isinstance(result, Series)
|
| 360 |
+
assert result.index is agg_axis
|
| 361 |
+
else:
|
| 362 |
+
assert isinstance(result, DataFrame)
|
| 363 |
+
|
| 364 |
+
|
| 365 |
+
def test_apply_empty_infer_type_broadcast():
|
| 366 |
+
no_cols = DataFrame(index=["a", "b", "c"])
|
| 367 |
+
result = no_cols.apply(lambda x: x.mean(), result_type="broadcast")
|
| 368 |
+
assert isinstance(result, DataFrame)
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
def test_apply_with_args_kwds_add_some(float_frame):
|
| 372 |
+
def add_some(x, howmuch=0):
|
| 373 |
+
return x + howmuch
|
| 374 |
+
|
| 375 |
+
result = float_frame.apply(add_some, howmuch=2)
|
| 376 |
+
expected = float_frame.apply(lambda x: x + 2)
|
| 377 |
+
tm.assert_frame_equal(result, expected)
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
def test_apply_with_args_kwds_agg_and_add(float_frame):
|
| 381 |
+
def agg_and_add(x, howmuch=0):
|
| 382 |
+
return x.mean() + howmuch
|
| 383 |
+
|
| 384 |
+
result = float_frame.apply(agg_and_add, howmuch=2)
|
| 385 |
+
expected = float_frame.apply(lambda x: x.mean() + 2)
|
| 386 |
+
tm.assert_series_equal(result, expected)
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
def test_apply_with_args_kwds_subtract_and_divide(float_frame):
|
| 390 |
+
def subtract_and_divide(x, sub, divide=1):
|
| 391 |
+
return (x - sub) / divide
|
| 392 |
+
|
| 393 |
+
result = float_frame.apply(subtract_and_divide, args=(2,), divide=2)
|
| 394 |
+
expected = float_frame.apply(lambda x: (x - 2.0) / 2.0)
|
| 395 |
+
tm.assert_frame_equal(result, expected)
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
def test_apply_yield_list(float_frame):
|
| 399 |
+
result = float_frame.apply(list)
|
| 400 |
+
tm.assert_frame_equal(result, float_frame)
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
def test_apply_reduce_Series(float_frame):
|
| 404 |
+
float_frame.iloc[::2, float_frame.columns.get_loc("A")] = np.nan
|
| 405 |
+
expected = float_frame.mean(1)
|
| 406 |
+
result = float_frame.apply(np.mean, axis=1)
|
| 407 |
+
tm.assert_series_equal(result, expected)
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
def test_apply_reduce_to_dict():
|
| 411 |
+
# GH 25196 37544
|
| 412 |
+
data = DataFrame([[1, 2], [3, 4]], columns=["c0", "c1"], index=["i0", "i1"])
|
| 413 |
+
|
| 414 |
+
result = data.apply(dict, axis=0)
|
| 415 |
+
expected = Series([{"i0": 1, "i1": 3}, {"i0": 2, "i1": 4}], index=data.columns)
|
| 416 |
+
tm.assert_series_equal(result, expected)
|
| 417 |
+
|
| 418 |
+
result = data.apply(dict, axis=1)
|
| 419 |
+
expected = Series([{"c0": 1, "c1": 2}, {"c0": 3, "c1": 4}], index=data.index)
|
| 420 |
+
tm.assert_series_equal(result, expected)
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
def test_apply_differently_indexed():
|
| 424 |
+
df = DataFrame(np.random.default_rng(2).standard_normal((20, 10)))
|
| 425 |
+
|
| 426 |
+
result = df.apply(Series.describe, axis=0)
|
| 427 |
+
expected = DataFrame({i: v.describe() for i, v in df.items()}, columns=df.columns)
|
| 428 |
+
tm.assert_frame_equal(result, expected)
|
| 429 |
+
|
| 430 |
+
result = df.apply(Series.describe, axis=1)
|
| 431 |
+
expected = DataFrame({i: v.describe() for i, v in df.T.items()}, columns=df.index).T
|
| 432 |
+
tm.assert_frame_equal(result, expected)
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
def test_apply_bug():
|
| 436 |
+
# GH 6125
|
| 437 |
+
positions = DataFrame(
|
| 438 |
+
[
|
| 439 |
+
[1, "ABC0", 50],
|
| 440 |
+
[1, "YUM0", 20],
|
| 441 |
+
[1, "DEF0", 20],
|
| 442 |
+
[2, "ABC1", 50],
|
| 443 |
+
[2, "YUM1", 20],
|
| 444 |
+
[2, "DEF1", 20],
|
| 445 |
+
],
|
| 446 |
+
columns=["a", "market", "position"],
|
| 447 |
+
)
|
| 448 |
+
|
| 449 |
+
def f(r):
|
| 450 |
+
return r["market"]
|
| 451 |
+
|
| 452 |
+
expected = positions.apply(f, axis=1)
|
| 453 |
+
|
| 454 |
+
positions = DataFrame(
|
| 455 |
+
[
|
| 456 |
+
[datetime(2013, 1, 1), "ABC0", 50],
|
| 457 |
+
[datetime(2013, 1, 2), "YUM0", 20],
|
| 458 |
+
[datetime(2013, 1, 3), "DEF0", 20],
|
| 459 |
+
[datetime(2013, 1, 4), "ABC1", 50],
|
| 460 |
+
[datetime(2013, 1, 5), "YUM1", 20],
|
| 461 |
+
[datetime(2013, 1, 6), "DEF1", 20],
|
| 462 |
+
],
|
| 463 |
+
columns=["a", "market", "position"],
|
| 464 |
+
)
|
| 465 |
+
result = positions.apply(f, axis=1)
|
| 466 |
+
tm.assert_series_equal(result, expected)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def test_apply_convert_objects():
|
| 470 |
+
expected = DataFrame(
|
| 471 |
+
{
|
| 472 |
+
"A": [
|
| 473 |
+
"foo",
|
| 474 |
+
"foo",
|
| 475 |
+
"foo",
|
| 476 |
+
"foo",
|
| 477 |
+
"bar",
|
| 478 |
+
"bar",
|
| 479 |
+
"bar",
|
| 480 |
+
"bar",
|
| 481 |
+
"foo",
|
| 482 |
+
"foo",
|
| 483 |
+
"foo",
|
| 484 |
+
],
|
| 485 |
+
"B": [
|
| 486 |
+
"one",
|
| 487 |
+
"one",
|
| 488 |
+
"one",
|
| 489 |
+
"two",
|
| 490 |
+
"one",
|
| 491 |
+
"one",
|
| 492 |
+
"one",
|
| 493 |
+
"two",
|
| 494 |
+
"two",
|
| 495 |
+
"two",
|
| 496 |
+
"one",
|
| 497 |
+
],
|
| 498 |
+
"C": [
|
| 499 |
+
"dull",
|
| 500 |
+
"dull",
|
| 501 |
+
"shiny",
|
| 502 |
+
"dull",
|
| 503 |
+
"dull",
|
| 504 |
+
"shiny",
|
| 505 |
+
"shiny",
|
| 506 |
+
"dull",
|
| 507 |
+
"shiny",
|
| 508 |
+
"shiny",
|
| 509 |
+
"shiny",
|
| 510 |
+
],
|
| 511 |
+
"D": np.random.default_rng(2).standard_normal(11),
|
| 512 |
+
"E": np.random.default_rng(2).standard_normal(11),
|
| 513 |
+
"F": np.random.default_rng(2).standard_normal(11),
|
| 514 |
+
}
|
| 515 |
+
)
|
| 516 |
+
|
| 517 |
+
result = expected.apply(lambda x: x, axis=1)
|
| 518 |
+
tm.assert_frame_equal(result, expected)
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
def test_apply_attach_name(float_frame):
|
| 522 |
+
result = float_frame.apply(lambda x: x.name)
|
| 523 |
+
expected = Series(float_frame.columns, index=float_frame.columns)
|
| 524 |
+
tm.assert_series_equal(result, expected)
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
def test_apply_attach_name_axis1(float_frame):
|
| 528 |
+
result = float_frame.apply(lambda x: x.name, axis=1)
|
| 529 |
+
expected = Series(float_frame.index, index=float_frame.index)
|
| 530 |
+
tm.assert_series_equal(result, expected)
|
| 531 |
+
|
| 532 |
+
|
| 533 |
+
def test_apply_attach_name_non_reduction(float_frame):
|
| 534 |
+
# non-reductions
|
| 535 |
+
result = float_frame.apply(lambda x: np.repeat(x.name, len(x)))
|
| 536 |
+
expected = DataFrame(
|
| 537 |
+
np.tile(float_frame.columns, (len(float_frame.index), 1)),
|
| 538 |
+
index=float_frame.index,
|
| 539 |
+
columns=float_frame.columns,
|
| 540 |
+
)
|
| 541 |
+
tm.assert_frame_equal(result, expected)
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
def test_apply_attach_name_non_reduction_axis1(float_frame):
|
| 545 |
+
result = float_frame.apply(lambda x: np.repeat(x.name, len(x)), axis=1)
|
| 546 |
+
expected = Series(
|
| 547 |
+
np.repeat(t[0], len(float_frame.columns)) for t in float_frame.itertuples()
|
| 548 |
+
)
|
| 549 |
+
expected.index = float_frame.index
|
| 550 |
+
tm.assert_series_equal(result, expected)
|
| 551 |
+
|
| 552 |
+
|
| 553 |
+
def test_apply_multi_index():
|
| 554 |
+
index = MultiIndex.from_arrays([["a", "a", "b"], ["c", "d", "d"]])
|
| 555 |
+
s = DataFrame([[1, 2], [3, 4], [5, 6]], index=index, columns=["col1", "col2"])
|
| 556 |
+
result = s.apply(lambda x: Series({"min": min(x), "max": max(x)}), 1)
|
| 557 |
+
expected = DataFrame([[1, 2], [3, 4], [5, 6]], index=index, columns=["min", "max"])
|
| 558 |
+
tm.assert_frame_equal(result, expected, check_like=True)
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
@pytest.mark.parametrize(
|
| 562 |
+
"df, dicts",
|
| 563 |
+
[
|
| 564 |
+
[
|
| 565 |
+
DataFrame([["foo", "bar"], ["spam", "eggs"]]),
|
| 566 |
+
Series([{0: "foo", 1: "spam"}, {0: "bar", 1: "eggs"}]),
|
| 567 |
+
],
|
| 568 |
+
[DataFrame([[0, 1], [2, 3]]), Series([{0: 0, 1: 2}, {0: 1, 1: 3}])],
|
| 569 |
+
],
|
| 570 |
+
)
|
| 571 |
+
def test_apply_dict(df, dicts):
|
| 572 |
+
# GH 8735
|
| 573 |
+
fn = lambda x: x.to_dict()
|
| 574 |
+
reduce_true = df.apply(fn, result_type="reduce")
|
| 575 |
+
reduce_false = df.apply(fn, result_type="expand")
|
| 576 |
+
reduce_none = df.apply(fn)
|
| 577 |
+
|
| 578 |
+
tm.assert_series_equal(reduce_true, dicts)
|
| 579 |
+
tm.assert_frame_equal(reduce_false, df)
|
| 580 |
+
tm.assert_series_equal(reduce_none, dicts)
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
def test_apply_non_numpy_dtype():
|
| 584 |
+
# GH 12244
|
| 585 |
+
df = DataFrame({"dt": date_range("2015-01-01", periods=3, tz="Europe/Brussels")})
|
| 586 |
+
result = df.apply(lambda x: x)
|
| 587 |
+
tm.assert_frame_equal(result, df)
|
| 588 |
+
|
| 589 |
+
result = df.apply(lambda x: x + pd.Timedelta("1day"))
|
| 590 |
+
expected = DataFrame(
|
| 591 |
+
{"dt": date_range("2015-01-02", periods=3, tz="Europe/Brussels")}
|
| 592 |
+
)
|
| 593 |
+
tm.assert_frame_equal(result, expected)
|
| 594 |
+
|
| 595 |
+
|
| 596 |
+
def test_apply_non_numpy_dtype_category():
|
| 597 |
+
df = DataFrame({"dt": ["a", "b", "c", "a"]}, dtype="category")
|
| 598 |
+
result = df.apply(lambda x: x)
|
| 599 |
+
tm.assert_frame_equal(result, df)
|
| 600 |
+
|
| 601 |
+
|
| 602 |
+
def test_apply_dup_names_multi_agg():
|
| 603 |
+
# GH 21063
|
| 604 |
+
df = DataFrame([[0, 1], [2, 3]], columns=["a", "a"])
|
| 605 |
+
expected = DataFrame([[0, 1]], columns=["a", "a"], index=["min"])
|
| 606 |
+
result = df.agg(["min"])
|
| 607 |
+
|
| 608 |
+
tm.assert_frame_equal(result, expected)
|
| 609 |
+
|
| 610 |
+
|
| 611 |
+
@pytest.mark.parametrize("op", ["apply", "agg"])
|
| 612 |
+
def test_apply_nested_result_axis_1(op):
|
| 613 |
+
# GH 13820
|
| 614 |
+
def apply_list(row):
|
| 615 |
+
return [2 * row["A"], 2 * row["C"], 2 * row["B"]]
|
| 616 |
+
|
| 617 |
+
df = DataFrame(np.zeros((4, 4)), columns=list("ABCD"))
|
| 618 |
+
result = getattr(df, op)(apply_list, axis=1)
|
| 619 |
+
expected = Series(
|
| 620 |
+
[[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
|
| 621 |
+
)
|
| 622 |
+
tm.assert_series_equal(result, expected)
|
| 623 |
+
|
| 624 |
+
|
| 625 |
+
def test_apply_noreduction_tzaware_object():
|
| 626 |
+
# https://github.com/pandas-dev/pandas/issues/31505
|
| 627 |
+
expected = DataFrame(
|
| 628 |
+
{"foo": [Timestamp("2020", tz="UTC")]}, dtype="datetime64[ns, UTC]"
|
| 629 |
+
)
|
| 630 |
+
result = expected.apply(lambda x: x)
|
| 631 |
+
tm.assert_frame_equal(result, expected)
|
| 632 |
+
result = expected.apply(lambda x: x.copy())
|
| 633 |
+
tm.assert_frame_equal(result, expected)
|
| 634 |
+
|
| 635 |
+
|
| 636 |
+
def test_apply_function_runs_once():
|
| 637 |
+
# https://github.com/pandas-dev/pandas/issues/30815
|
| 638 |
+
|
| 639 |
+
df = DataFrame({"a": [1, 2, 3]})
|
| 640 |
+
names = [] # Save row names function is applied to
|
| 641 |
+
|
| 642 |
+
def reducing_function(row):
|
| 643 |
+
names.append(row.name)
|
| 644 |
+
|
| 645 |
+
def non_reducing_function(row):
|
| 646 |
+
names.append(row.name)
|
| 647 |
+
return row
|
| 648 |
+
|
| 649 |
+
for func in [reducing_function, non_reducing_function]:
|
| 650 |
+
del names[:]
|
| 651 |
+
|
| 652 |
+
df.apply(func, axis=1)
|
| 653 |
+
assert names == list(df.index)
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
def test_apply_raw_function_runs_once(engine):
|
| 657 |
+
# https://github.com/pandas-dev/pandas/issues/34506
|
| 658 |
+
if engine == "numba":
|
| 659 |
+
pytest.skip("appending to list outside of numba func is not supported")
|
| 660 |
+
|
| 661 |
+
df = DataFrame({"a": [1, 2, 3]})
|
| 662 |
+
values = [] # Save row values function is applied to
|
| 663 |
+
|
| 664 |
+
def reducing_function(row):
|
| 665 |
+
values.extend(row)
|
| 666 |
+
|
| 667 |
+
def non_reducing_function(row):
|
| 668 |
+
values.extend(row)
|
| 669 |
+
return row
|
| 670 |
+
|
| 671 |
+
for func in [reducing_function, non_reducing_function]:
|
| 672 |
+
del values[:]
|
| 673 |
+
|
| 674 |
+
df.apply(func, engine=engine, raw=True, axis=1)
|
| 675 |
+
assert values == list(df.a.to_list())
|
| 676 |
+
|
| 677 |
+
|
| 678 |
+
def test_apply_with_byte_string():
|
| 679 |
+
# GH 34529
|
| 680 |
+
df = DataFrame(np.array([b"abcd", b"efgh"]), columns=["col"])
|
| 681 |
+
expected = DataFrame(np.array([b"abcd", b"efgh"]), columns=["col"], dtype=object)
|
| 682 |
+
# After we make the apply we expect a dataframe just
|
| 683 |
+
# like the original but with the object datatype
|
| 684 |
+
result = df.apply(lambda x: x.astype("object"))
|
| 685 |
+
tm.assert_frame_equal(result, expected)
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
@pytest.mark.parametrize("val", ["asd", 12, None, np.nan])
|
| 689 |
+
def test_apply_category_equalness(val):
|
| 690 |
+
# Check if categorical comparisons on apply, GH 21239
|
| 691 |
+
df_values = ["asd", None, 12, "asd", "cde", np.nan]
|
| 692 |
+
df = DataFrame({"a": df_values}, dtype="category")
|
| 693 |
+
|
| 694 |
+
result = df.a.apply(lambda x: x == val)
|
| 695 |
+
expected = Series(
|
| 696 |
+
[np.nan if pd.isnull(x) else x == val for x in df_values], name="a"
|
| 697 |
+
)
|
| 698 |
+
tm.assert_series_equal(result, expected)
|
| 699 |
+
|
| 700 |
+
|
| 701 |
+
# the user has supplied an opaque UDF where
|
| 702 |
+
# they are transforming the input that requires
|
| 703 |
+
# us to infer the output
|
| 704 |
+
|
| 705 |
+
|
| 706 |
+
def test_infer_row_shape():
|
| 707 |
+
# GH 17437
|
| 708 |
+
# if row shape is changing, infer it
|
| 709 |
+
df = DataFrame(np.random.default_rng(2).random((10, 2)))
|
| 710 |
+
result = df.apply(np.fft.fft, axis=0).shape
|
| 711 |
+
assert result == (10, 2)
|
| 712 |
+
|
| 713 |
+
result = df.apply(np.fft.rfft, axis=0).shape
|
| 714 |
+
assert result == (6, 2)
|
| 715 |
+
|
| 716 |
+
|
| 717 |
+
@pytest.mark.parametrize(
|
| 718 |
+
"ops, by_row, expected",
|
| 719 |
+
[
|
| 720 |
+
({"a": lambda x: x + 1}, "compat", DataFrame({"a": [2, 3]})),
|
| 721 |
+
({"a": lambda x: x + 1}, False, DataFrame({"a": [2, 3]})),
|
| 722 |
+
({"a": lambda x: x.sum()}, "compat", Series({"a": 3})),
|
| 723 |
+
({"a": lambda x: x.sum()}, False, Series({"a": 3})),
|
| 724 |
+
(
|
| 725 |
+
{"a": ["sum", np.sum, lambda x: x.sum()]},
|
| 726 |
+
"compat",
|
| 727 |
+
DataFrame({"a": [3, 3, 3]}, index=["sum", "sum", "<lambda>"]),
|
| 728 |
+
),
|
| 729 |
+
(
|
| 730 |
+
{"a": ["sum", np.sum, lambda x: x.sum()]},
|
| 731 |
+
False,
|
| 732 |
+
DataFrame({"a": [3, 3, 3]}, index=["sum", "sum", "<lambda>"]),
|
| 733 |
+
),
|
| 734 |
+
({"a": lambda x: 1}, "compat", DataFrame({"a": [1, 1]})),
|
| 735 |
+
({"a": lambda x: 1}, False, Series({"a": 1})),
|
| 736 |
+
],
|
| 737 |
+
)
|
| 738 |
+
def test_dictlike_lambda(ops, by_row, expected):
|
| 739 |
+
# GH53601
|
| 740 |
+
df = DataFrame({"a": [1, 2]})
|
| 741 |
+
result = df.apply(ops, by_row=by_row)
|
| 742 |
+
tm.assert_equal(result, expected)
|
| 743 |
+
|
| 744 |
+
|
| 745 |
+
@pytest.mark.parametrize(
|
| 746 |
+
"ops",
|
| 747 |
+
[
|
| 748 |
+
{"a": lambda x: x + 1},
|
| 749 |
+
{"a": lambda x: x.sum()},
|
| 750 |
+
{"a": ["sum", np.sum, lambda x: x.sum()]},
|
| 751 |
+
{"a": lambda x: 1},
|
| 752 |
+
],
|
| 753 |
+
)
|
| 754 |
+
def test_dictlike_lambda_raises(ops):
|
| 755 |
+
# GH53601
|
| 756 |
+
df = DataFrame({"a": [1, 2]})
|
| 757 |
+
with pytest.raises(ValueError, match="by_row=True not allowed"):
|
| 758 |
+
df.apply(ops, by_row=True)
|
| 759 |
+
|
| 760 |
+
|
| 761 |
+
def test_with_dictlike_columns():
|
| 762 |
+
# GH 17602
|
| 763 |
+
df = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
|
| 764 |
+
result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1)
|
| 765 |
+
expected = Series([{"s": 3} for t in df.itertuples()])
|
| 766 |
+
tm.assert_series_equal(result, expected)
|
| 767 |
+
|
| 768 |
+
df["tm"] = [
|
| 769 |
+
Timestamp("2017-05-01 00:00:00"),
|
| 770 |
+
Timestamp("2017-05-02 00:00:00"),
|
| 771 |
+
]
|
| 772 |
+
result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1)
|
| 773 |
+
tm.assert_series_equal(result, expected)
|
| 774 |
+
|
| 775 |
+
# compose a series
|
| 776 |
+
result = (df["a"] + df["b"]).apply(lambda x: {"s": x})
|
| 777 |
+
expected = Series([{"s": 3}, {"s": 3}])
|
| 778 |
+
tm.assert_series_equal(result, expected)
|
| 779 |
+
|
| 780 |
+
|
| 781 |
+
def test_with_dictlike_columns_with_datetime():
|
| 782 |
+
# GH 18775
|
| 783 |
+
df = DataFrame()
|
| 784 |
+
df["author"] = ["X", "Y", "Z"]
|
| 785 |
+
df["publisher"] = ["BBC", "NBC", "N24"]
|
| 786 |
+
df["date"] = pd.to_datetime(
|
| 787 |
+
["17-10-2010 07:15:30", "13-05-2011 08:20:35", "15-01-2013 09:09:09"],
|
| 788 |
+
dayfirst=True,
|
| 789 |
+
)
|
| 790 |
+
result = df.apply(lambda x: {}, axis=1)
|
| 791 |
+
expected = Series([{}, {}, {}])
|
| 792 |
+
tm.assert_series_equal(result, expected)
|
| 793 |
+
|
| 794 |
+
|
| 795 |
+
def test_with_dictlike_columns_with_infer():
|
| 796 |
+
# GH 17602
|
| 797 |
+
df = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
|
| 798 |
+
result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1, result_type="expand")
|
| 799 |
+
expected = DataFrame({"s": [3, 3]})
|
| 800 |
+
tm.assert_frame_equal(result, expected)
|
| 801 |
+
|
| 802 |
+
df["tm"] = [
|
| 803 |
+
Timestamp("2017-05-01 00:00:00"),
|
| 804 |
+
Timestamp("2017-05-02 00:00:00"),
|
| 805 |
+
]
|
| 806 |
+
result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1, result_type="expand")
|
| 807 |
+
tm.assert_frame_equal(result, expected)
|
| 808 |
+
|
| 809 |
+
|
| 810 |
+
@pytest.mark.parametrize(
|
| 811 |
+
"ops, by_row, expected",
|
| 812 |
+
[
|
| 813 |
+
([lambda x: x + 1], "compat", DataFrame({("a", "<lambda>"): [2, 3]})),
|
| 814 |
+
([lambda x: x + 1], False, DataFrame({("a", "<lambda>"): [2, 3]})),
|
| 815 |
+
([lambda x: x.sum()], "compat", DataFrame({"a": [3]}, index=["<lambda>"])),
|
| 816 |
+
([lambda x: x.sum()], False, DataFrame({"a": [3]}, index=["<lambda>"])),
|
| 817 |
+
(
|
| 818 |
+
["sum", np.sum, lambda x: x.sum()],
|
| 819 |
+
"compat",
|
| 820 |
+
DataFrame({"a": [3, 3, 3]}, index=["sum", "sum", "<lambda>"]),
|
| 821 |
+
),
|
| 822 |
+
(
|
| 823 |
+
["sum", np.sum, lambda x: x.sum()],
|
| 824 |
+
False,
|
| 825 |
+
DataFrame({"a": [3, 3, 3]}, index=["sum", "sum", "<lambda>"]),
|
| 826 |
+
),
|
| 827 |
+
(
|
| 828 |
+
[lambda x: x + 1, lambda x: 3],
|
| 829 |
+
"compat",
|
| 830 |
+
DataFrame([[2, 3], [3, 3]], columns=[["a", "a"], ["<lambda>", "<lambda>"]]),
|
| 831 |
+
),
|
| 832 |
+
(
|
| 833 |
+
[lambda x: 2, lambda x: 3],
|
| 834 |
+
False,
|
| 835 |
+
DataFrame({"a": [2, 3]}, ["<lambda>", "<lambda>"]),
|
| 836 |
+
),
|
| 837 |
+
],
|
| 838 |
+
)
|
| 839 |
+
def test_listlike_lambda(ops, by_row, expected):
|
| 840 |
+
# GH53601
|
| 841 |
+
df = DataFrame({"a": [1, 2]})
|
| 842 |
+
result = df.apply(ops, by_row=by_row)
|
| 843 |
+
tm.assert_equal(result, expected)
|
| 844 |
+
|
| 845 |
+
|
| 846 |
+
@pytest.mark.parametrize(
|
| 847 |
+
"ops",
|
| 848 |
+
[
|
| 849 |
+
[lambda x: x + 1],
|
| 850 |
+
[lambda x: x.sum()],
|
| 851 |
+
["sum", np.sum, lambda x: x.sum()],
|
| 852 |
+
[lambda x: x + 1, lambda x: 3],
|
| 853 |
+
],
|
| 854 |
+
)
|
| 855 |
+
def test_listlike_lambda_raises(ops):
|
| 856 |
+
# GH53601
|
| 857 |
+
df = DataFrame({"a": [1, 2]})
|
| 858 |
+
with pytest.raises(ValueError, match="by_row=True not allowed"):
|
| 859 |
+
df.apply(ops, by_row=True)
|
| 860 |
+
|
| 861 |
+
|
| 862 |
+
def test_with_listlike_columns():
|
| 863 |
+
# GH 17348
|
| 864 |
+
df = DataFrame(
|
| 865 |
+
{
|
| 866 |
+
"a": Series(np.random.default_rng(2).standard_normal(4)),
|
| 867 |
+
"b": ["a", "list", "of", "words"],
|
| 868 |
+
"ts": date_range("2016-10-01", periods=4, freq="h"),
|
| 869 |
+
}
|
| 870 |
+
)
|
| 871 |
+
|
| 872 |
+
result = df[["a", "b"]].apply(tuple, axis=1)
|
| 873 |
+
expected = Series([t[1:] for t in df[["a", "b"]].itertuples()])
|
| 874 |
+
tm.assert_series_equal(result, expected)
|
| 875 |
+
|
| 876 |
+
result = df[["a", "ts"]].apply(tuple, axis=1)
|
| 877 |
+
expected = Series([t[1:] for t in df[["a", "ts"]].itertuples()])
|
| 878 |
+
tm.assert_series_equal(result, expected)
|
| 879 |
+
|
| 880 |
+
|
| 881 |
+
def test_with_listlike_columns_returning_list():
|
| 882 |
+
# GH 18919
|
| 883 |
+
df = DataFrame({"x": Series([["a", "b"], ["q"]]), "y": Series([["z"], ["q", "t"]])})
|
| 884 |
+
df.index = MultiIndex.from_tuples([("i0", "j0"), ("i1", "j1")])
|
| 885 |
+
|
| 886 |
+
result = df.apply(lambda row: [el for el in row["x"] if el in row["y"]], axis=1)
|
| 887 |
+
expected = Series([[], ["q"]], index=df.index)
|
| 888 |
+
tm.assert_series_equal(result, expected)
|
| 889 |
+
|
| 890 |
+
|
| 891 |
+
def test_infer_output_shape_columns():
|
| 892 |
+
# GH 18573
|
| 893 |
+
|
| 894 |
+
df = DataFrame(
|
| 895 |
+
{
|
| 896 |
+
"number": [1.0, 2.0],
|
| 897 |
+
"string": ["foo", "bar"],
|
| 898 |
+
"datetime": [
|
| 899 |
+
Timestamp("2017-11-29 03:30:00"),
|
| 900 |
+
Timestamp("2017-11-29 03:45:00"),
|
| 901 |
+
],
|
| 902 |
+
}
|
| 903 |
+
)
|
| 904 |
+
result = df.apply(lambda row: (row.number, row.string), axis=1)
|
| 905 |
+
expected = Series([(t.number, t.string) for t in df.itertuples()])
|
| 906 |
+
tm.assert_series_equal(result, expected)
|
| 907 |
+
|
| 908 |
+
|
| 909 |
+
def test_infer_output_shape_listlike_columns():
|
| 910 |
+
# GH 16353
|
| 911 |
+
|
| 912 |
+
df = DataFrame(
|
| 913 |
+
np.random.default_rng(2).standard_normal((6, 3)), columns=["A", "B", "C"]
|
| 914 |
+
)
|
| 915 |
+
|
| 916 |
+
result = df.apply(lambda x: [1, 2, 3], axis=1)
|
| 917 |
+
expected = Series([[1, 2, 3] for t in df.itertuples()])
|
| 918 |
+
tm.assert_series_equal(result, expected)
|
| 919 |
+
|
| 920 |
+
result = df.apply(lambda x: [1, 2], axis=1)
|
| 921 |
+
expected = Series([[1, 2] for t in df.itertuples()])
|
| 922 |
+
tm.assert_series_equal(result, expected)
|
| 923 |
+
|
| 924 |
+
|
| 925 |
+
@pytest.mark.parametrize("val", [1, 2])
|
| 926 |
+
def test_infer_output_shape_listlike_columns_np_func(val):
|
| 927 |
+
# GH 17970
|
| 928 |
+
df = DataFrame({"a": [1, 2, 3]}, index=list("abc"))
|
| 929 |
+
|
| 930 |
+
result = df.apply(lambda row: np.ones(val), axis=1)
|
| 931 |
+
expected = Series([np.ones(val) for t in df.itertuples()], index=df.index)
|
| 932 |
+
tm.assert_series_equal(result, expected)
|
| 933 |
+
|
| 934 |
+
|
| 935 |
+
def test_infer_output_shape_listlike_columns_with_timestamp():
|
| 936 |
+
# GH 17892
|
| 937 |
+
df = DataFrame(
|
| 938 |
+
{
|
| 939 |
+
"a": [
|
| 940 |
+
Timestamp("2010-02-01"),
|
| 941 |
+
Timestamp("2010-02-04"),
|
| 942 |
+
Timestamp("2010-02-05"),
|
| 943 |
+
Timestamp("2010-02-06"),
|
| 944 |
+
],
|
| 945 |
+
"b": [9, 5, 4, 3],
|
| 946 |
+
"c": [5, 3, 4, 2],
|
| 947 |
+
"d": [1, 2, 3, 4],
|
| 948 |
+
}
|
| 949 |
+
)
|
| 950 |
+
|
| 951 |
+
def fun(x):
|
| 952 |
+
return (1, 2)
|
| 953 |
+
|
| 954 |
+
result = df.apply(fun, axis=1)
|
| 955 |
+
expected = Series([(1, 2) for t in df.itertuples()])
|
| 956 |
+
tm.assert_series_equal(result, expected)
|
| 957 |
+
|
| 958 |
+
|
| 959 |
+
@pytest.mark.parametrize("lst", [[1, 2, 3], [1, 2]])
|
| 960 |
+
def test_consistent_coerce_for_shapes(lst):
|
| 961 |
+
# we want column names to NOT be propagated
|
| 962 |
+
# just because the shape matches the input shape
|
| 963 |
+
df = DataFrame(
|
| 964 |
+
np.random.default_rng(2).standard_normal((4, 3)), columns=["A", "B", "C"]
|
| 965 |
+
)
|
| 966 |
+
|
| 967 |
+
result = df.apply(lambda x: lst, axis=1)
|
| 968 |
+
expected = Series([lst for t in df.itertuples()])
|
| 969 |
+
tm.assert_series_equal(result, expected)
|
| 970 |
+
|
| 971 |
+
|
| 972 |
+
def test_consistent_names(int_frame_const_col):
|
| 973 |
+
# if a Series is returned, we should use the resulting index names
|
| 974 |
+
df = int_frame_const_col
|
| 975 |
+
|
| 976 |
+
result = df.apply(
|
| 977 |
+
lambda x: Series([1, 2, 3], index=["test", "other", "cols"]), axis=1
|
| 978 |
+
)
|
| 979 |
+
expected = int_frame_const_col.rename(
|
| 980 |
+
columns={"A": "test", "B": "other", "C": "cols"}
|
| 981 |
+
)
|
| 982 |
+
tm.assert_frame_equal(result, expected)
|
| 983 |
+
|
| 984 |
+
result = df.apply(lambda x: Series([1, 2], index=["test", "other"]), axis=1)
|
| 985 |
+
expected = expected[["test", "other"]]
|
| 986 |
+
tm.assert_frame_equal(result, expected)
|
| 987 |
+
|
| 988 |
+
|
| 989 |
+
def test_result_type(int_frame_const_col):
|
| 990 |
+
# result_type should be consistent no matter which
|
| 991 |
+
# path we take in the code
|
| 992 |
+
df = int_frame_const_col
|
| 993 |
+
|
| 994 |
+
result = df.apply(lambda x: [1, 2, 3], axis=1, result_type="expand")
|
| 995 |
+
expected = df.copy()
|
| 996 |
+
expected.columns = [0, 1, 2]
|
| 997 |
+
tm.assert_frame_equal(result, expected)
|
| 998 |
+
|
| 999 |
+
|
| 1000 |
+
def test_result_type_shorter_list(int_frame_const_col):
|
| 1001 |
+
# result_type should be consistent no matter which
|
| 1002 |
+
# path we take in the code
|
| 1003 |
+
df = int_frame_const_col
|
| 1004 |
+
result = df.apply(lambda x: [1, 2], axis=1, result_type="expand")
|
| 1005 |
+
expected = df[["A", "B"]].copy()
|
| 1006 |
+
expected.columns = [0, 1]
|
| 1007 |
+
tm.assert_frame_equal(result, expected)
|
| 1008 |
+
|
| 1009 |
+
|
| 1010 |
+
def test_result_type_broadcast(int_frame_const_col, request, engine):
|
| 1011 |
+
# result_type should be consistent no matter which
|
| 1012 |
+
# path we take in the code
|
| 1013 |
+
if engine == "numba":
|
| 1014 |
+
mark = pytest.mark.xfail(reason="numba engine doesn't support list return")
|
| 1015 |
+
request.node.add_marker(mark)
|
| 1016 |
+
df = int_frame_const_col
|
| 1017 |
+
# broadcast result
|
| 1018 |
+
result = df.apply(
|
| 1019 |
+
lambda x: [1, 2, 3], axis=1, result_type="broadcast", engine=engine
|
| 1020 |
+
)
|
| 1021 |
+
expected = df.copy()
|
| 1022 |
+
tm.assert_frame_equal(result, expected)
|
| 1023 |
+
|
| 1024 |
+
|
| 1025 |
+
def test_result_type_broadcast_series_func(int_frame_const_col, engine, request):
|
| 1026 |
+
# result_type should be consistent no matter which
|
| 1027 |
+
# path we take in the code
|
| 1028 |
+
if engine == "numba":
|
| 1029 |
+
mark = pytest.mark.xfail(
|
| 1030 |
+
reason="numba Series constructor only support ndarrays not list data"
|
| 1031 |
+
)
|
| 1032 |
+
request.node.add_marker(mark)
|
| 1033 |
+
df = int_frame_const_col
|
| 1034 |
+
columns = ["other", "col", "names"]
|
| 1035 |
+
result = df.apply(
|
| 1036 |
+
lambda x: Series([1, 2, 3], index=columns),
|
| 1037 |
+
axis=1,
|
| 1038 |
+
result_type="broadcast",
|
| 1039 |
+
engine=engine,
|
| 1040 |
+
)
|
| 1041 |
+
expected = df.copy()
|
| 1042 |
+
tm.assert_frame_equal(result, expected)
|
| 1043 |
+
|
| 1044 |
+
|
| 1045 |
+
def test_result_type_series_result(int_frame_const_col, engine, request):
|
| 1046 |
+
# result_type should be consistent no matter which
|
| 1047 |
+
# path we take in the code
|
| 1048 |
+
if engine == "numba":
|
| 1049 |
+
mark = pytest.mark.xfail(
|
| 1050 |
+
reason="numba Series constructor only support ndarrays not list data"
|
| 1051 |
+
)
|
| 1052 |
+
request.node.add_marker(mark)
|
| 1053 |
+
df = int_frame_const_col
|
| 1054 |
+
# series result
|
| 1055 |
+
result = df.apply(lambda x: Series([1, 2, 3], index=x.index), axis=1, engine=engine)
|
| 1056 |
+
expected = df.copy()
|
| 1057 |
+
tm.assert_frame_equal(result, expected)
|
| 1058 |
+
|
| 1059 |
+
|
| 1060 |
+
def test_result_type_series_result_other_index(int_frame_const_col, engine, request):
|
| 1061 |
+
# result_type should be consistent no matter which
|
| 1062 |
+
# path we take in the code
|
| 1063 |
+
|
| 1064 |
+
if engine == "numba":
|
| 1065 |
+
mark = pytest.mark.xfail(
|
| 1066 |
+
reason="no support in numba Series constructor for list of columns"
|
| 1067 |
+
)
|
| 1068 |
+
request.node.add_marker(mark)
|
| 1069 |
+
df = int_frame_const_col
|
| 1070 |
+
# series result with other index
|
| 1071 |
+
columns = ["other", "col", "names"]
|
| 1072 |
+
result = df.apply(lambda x: Series([1, 2, 3], index=columns), axis=1, engine=engine)
|
| 1073 |
+
expected = df.copy()
|
| 1074 |
+
expected.columns = columns
|
| 1075 |
+
tm.assert_frame_equal(result, expected)
|
| 1076 |
+
|
| 1077 |
+
|
| 1078 |
+
@pytest.mark.parametrize(
|
| 1079 |
+
"box",
|
| 1080 |
+
[lambda x: list(x), lambda x: tuple(x), lambda x: np.array(x, dtype="int64")],
|
| 1081 |
+
ids=["list", "tuple", "array"],
|
| 1082 |
+
)
|
| 1083 |
+
def test_consistency_for_boxed(box, int_frame_const_col):
|
| 1084 |
+
# passing an array or list should not affect the output shape
|
| 1085 |
+
df = int_frame_const_col
|
| 1086 |
+
|
| 1087 |
+
result = df.apply(lambda x: box([1, 2]), axis=1)
|
| 1088 |
+
expected = Series([box([1, 2]) for t in df.itertuples()])
|
| 1089 |
+
tm.assert_series_equal(result, expected)
|
| 1090 |
+
|
| 1091 |
+
result = df.apply(lambda x: box([1, 2]), axis=1, result_type="expand")
|
| 1092 |
+
expected = int_frame_const_col[["A", "B"]].rename(columns={"A": 0, "B": 1})
|
| 1093 |
+
tm.assert_frame_equal(result, expected)
|
| 1094 |
+
|
| 1095 |
+
|
| 1096 |
+
def test_agg_transform(axis, float_frame):
|
| 1097 |
+
other_axis = 1 if axis in {0, "index"} else 0
|
| 1098 |
+
|
| 1099 |
+
with np.errstate(all="ignore"):
|
| 1100 |
+
f_abs = np.abs(float_frame)
|
| 1101 |
+
f_sqrt = np.sqrt(float_frame)
|
| 1102 |
+
|
| 1103 |
+
# ufunc
|
| 1104 |
+
expected = f_sqrt.copy()
|
| 1105 |
+
result = float_frame.apply(np.sqrt, axis=axis)
|
| 1106 |
+
tm.assert_frame_equal(result, expected)
|
| 1107 |
+
|
| 1108 |
+
# list-like
|
| 1109 |
+
result = float_frame.apply([np.sqrt], axis=axis)
|
| 1110 |
+
expected = f_sqrt.copy()
|
| 1111 |
+
if axis in {0, "index"}:
|
| 1112 |
+
expected.columns = MultiIndex.from_product([float_frame.columns, ["sqrt"]])
|
| 1113 |
+
else:
|
| 1114 |
+
expected.index = MultiIndex.from_product([float_frame.index, ["sqrt"]])
|
| 1115 |
+
tm.assert_frame_equal(result, expected)
|
| 1116 |
+
|
| 1117 |
+
# multiple items in list
|
| 1118 |
+
# these are in the order as if we are applying both
|
| 1119 |
+
# functions per series and then concatting
|
| 1120 |
+
result = float_frame.apply([np.abs, np.sqrt], axis=axis)
|
| 1121 |
+
expected = zip_frames([f_abs, f_sqrt], axis=other_axis)
|
| 1122 |
+
if axis in {0, "index"}:
|
| 1123 |
+
expected.columns = MultiIndex.from_product(
|
| 1124 |
+
[float_frame.columns, ["absolute", "sqrt"]]
|
| 1125 |
+
)
|
| 1126 |
+
else:
|
| 1127 |
+
expected.index = MultiIndex.from_product(
|
| 1128 |
+
[float_frame.index, ["absolute", "sqrt"]]
|
| 1129 |
+
)
|
| 1130 |
+
tm.assert_frame_equal(result, expected)
|
| 1131 |
+
|
| 1132 |
+
|
| 1133 |
+
def test_demo():
|
| 1134 |
+
# demonstration tests
|
| 1135 |
+
df = DataFrame({"A": range(5), "B": 5})
|
| 1136 |
+
|
| 1137 |
+
result = df.agg(["min", "max"])
|
| 1138 |
+
expected = DataFrame(
|
| 1139 |
+
{"A": [0, 4], "B": [5, 5]}, columns=["A", "B"], index=["min", "max"]
|
| 1140 |
+
)
|
| 1141 |
+
tm.assert_frame_equal(result, expected)
|
| 1142 |
+
|
| 1143 |
+
|
| 1144 |
+
def test_demo_dict_agg():
|
| 1145 |
+
# demonstration tests
|
| 1146 |
+
df = DataFrame({"A": range(5), "B": 5})
|
| 1147 |
+
result = df.agg({"A": ["min", "max"], "B": ["sum", "max"]})
|
| 1148 |
+
expected = DataFrame(
|
| 1149 |
+
{"A": [4.0, 0.0, np.nan], "B": [5.0, np.nan, 25.0]},
|
| 1150 |
+
columns=["A", "B"],
|
| 1151 |
+
index=["max", "min", "sum"],
|
| 1152 |
+
)
|
| 1153 |
+
tm.assert_frame_equal(result.reindex_like(expected), expected)
|
| 1154 |
+
|
| 1155 |
+
|
| 1156 |
+
def test_agg_with_name_as_column_name():
|
| 1157 |
+
# GH 36212 - Column name is "name"
|
| 1158 |
+
data = {"name": ["foo", "bar"]}
|
| 1159 |
+
df = DataFrame(data)
|
| 1160 |
+
|
| 1161 |
+
# result's name should be None
|
| 1162 |
+
result = df.agg({"name": "count"})
|
| 1163 |
+
expected = Series({"name": 2})
|
| 1164 |
+
tm.assert_series_equal(result, expected)
|
| 1165 |
+
|
| 1166 |
+
# Check if name is still preserved when aggregating series instead
|
| 1167 |
+
result = df["name"].agg({"name": "count"})
|
| 1168 |
+
expected = Series({"name": 2}, name="name")
|
| 1169 |
+
tm.assert_series_equal(result, expected)
|
| 1170 |
+
|
| 1171 |
+
|
| 1172 |
+
def test_agg_multiple_mixed():
|
| 1173 |
+
# GH 20909
|
| 1174 |
+
mdf = DataFrame(
|
| 1175 |
+
{
|
| 1176 |
+
"A": [1, 2, 3],
|
| 1177 |
+
"B": [1.0, 2.0, 3.0],
|
| 1178 |
+
"C": ["foo", "bar", "baz"],
|
| 1179 |
+
}
|
| 1180 |
+
)
|
| 1181 |
+
expected = DataFrame(
|
| 1182 |
+
{
|
| 1183 |
+
"A": [1, 6],
|
| 1184 |
+
"B": [1.0, 6.0],
|
| 1185 |
+
"C": ["bar", "foobarbaz"],
|
| 1186 |
+
},
|
| 1187 |
+
index=["min", "sum"],
|
| 1188 |
+
)
|
| 1189 |
+
# sorted index
|
| 1190 |
+
result = mdf.agg(["min", "sum"])
|
| 1191 |
+
tm.assert_frame_equal(result, expected)
|
| 1192 |
+
|
| 1193 |
+
result = mdf[["C", "B", "A"]].agg(["sum", "min"])
|
| 1194 |
+
# GH40420: the result of .agg should have an index that is sorted
|
| 1195 |
+
# according to the arguments provided to agg.
|
| 1196 |
+
expected = expected[["C", "B", "A"]].reindex(["sum", "min"])
|
| 1197 |
+
tm.assert_frame_equal(result, expected)
|
| 1198 |
+
|
| 1199 |
+
|
| 1200 |
+
def test_agg_multiple_mixed_raises():
|
| 1201 |
+
# GH 20909
|
| 1202 |
+
mdf = DataFrame(
|
| 1203 |
+
{
|
| 1204 |
+
"A": [1, 2, 3],
|
| 1205 |
+
"B": [1.0, 2.0, 3.0],
|
| 1206 |
+
"C": ["foo", "bar", "baz"],
|
| 1207 |
+
"D": date_range("20130101", periods=3),
|
| 1208 |
+
}
|
| 1209 |
+
)
|
| 1210 |
+
|
| 1211 |
+
# sorted index
|
| 1212 |
+
msg = "does not support reduction"
|
| 1213 |
+
with pytest.raises(TypeError, match=msg):
|
| 1214 |
+
mdf.agg(["min", "sum"])
|
| 1215 |
+
|
| 1216 |
+
with pytest.raises(TypeError, match=msg):
|
| 1217 |
+
mdf[["D", "C", "B", "A"]].agg(["sum", "min"])
|
| 1218 |
+
|
| 1219 |
+
|
| 1220 |
+
def test_agg_reduce(axis, float_frame):
|
| 1221 |
+
other_axis = 1 if axis in {0, "index"} else 0
|
| 1222 |
+
name1, name2 = float_frame.axes[other_axis].unique()[:2].sort_values()
|
| 1223 |
+
|
| 1224 |
+
# all reducers
|
| 1225 |
+
expected = pd.concat(
|
| 1226 |
+
[
|
| 1227 |
+
float_frame.mean(axis=axis),
|
| 1228 |
+
float_frame.max(axis=axis),
|
| 1229 |
+
float_frame.sum(axis=axis),
|
| 1230 |
+
],
|
| 1231 |
+
axis=1,
|
| 1232 |
+
)
|
| 1233 |
+
expected.columns = ["mean", "max", "sum"]
|
| 1234 |
+
expected = expected.T if axis in {0, "index"} else expected
|
| 1235 |
+
|
| 1236 |
+
result = float_frame.agg(["mean", "max", "sum"], axis=axis)
|
| 1237 |
+
tm.assert_frame_equal(result, expected)
|
| 1238 |
+
|
| 1239 |
+
# dict input with scalars
|
| 1240 |
+
func = {name1: "mean", name2: "sum"}
|
| 1241 |
+
result = float_frame.agg(func, axis=axis)
|
| 1242 |
+
expected = Series(
|
| 1243 |
+
[
|
| 1244 |
+
float_frame.loc(other_axis)[name1].mean(),
|
| 1245 |
+
float_frame.loc(other_axis)[name2].sum(),
|
| 1246 |
+
],
|
| 1247 |
+
index=[name1, name2],
|
| 1248 |
+
)
|
| 1249 |
+
tm.assert_series_equal(result, expected)
|
| 1250 |
+
|
| 1251 |
+
# dict input with lists
|
| 1252 |
+
func = {name1: ["mean"], name2: ["sum"]}
|
| 1253 |
+
result = float_frame.agg(func, axis=axis)
|
| 1254 |
+
expected = DataFrame(
|
| 1255 |
+
{
|
| 1256 |
+
name1: Series([float_frame.loc(other_axis)[name1].mean()], index=["mean"]),
|
| 1257 |
+
name2: Series([float_frame.loc(other_axis)[name2].sum()], index=["sum"]),
|
| 1258 |
+
}
|
| 1259 |
+
)
|
| 1260 |
+
expected = expected.T if axis in {1, "columns"} else expected
|
| 1261 |
+
tm.assert_frame_equal(result, expected)
|
| 1262 |
+
|
| 1263 |
+
# dict input with lists with multiple
|
| 1264 |
+
func = {name1: ["mean", "sum"], name2: ["sum", "max"]}
|
| 1265 |
+
result = float_frame.agg(func, axis=axis)
|
| 1266 |
+
expected = pd.concat(
|
| 1267 |
+
{
|
| 1268 |
+
name1: Series(
|
| 1269 |
+
[
|
| 1270 |
+
float_frame.loc(other_axis)[name1].mean(),
|
| 1271 |
+
float_frame.loc(other_axis)[name1].sum(),
|
| 1272 |
+
],
|
| 1273 |
+
index=["mean", "sum"],
|
| 1274 |
+
),
|
| 1275 |
+
name2: Series(
|
| 1276 |
+
[
|
| 1277 |
+
float_frame.loc(other_axis)[name2].sum(),
|
| 1278 |
+
float_frame.loc(other_axis)[name2].max(),
|
| 1279 |
+
],
|
| 1280 |
+
index=["sum", "max"],
|
| 1281 |
+
),
|
| 1282 |
+
},
|
| 1283 |
+
axis=1,
|
| 1284 |
+
)
|
| 1285 |
+
expected = expected.T if axis in {1, "columns"} else expected
|
| 1286 |
+
tm.assert_frame_equal(result, expected)
|
| 1287 |
+
|
| 1288 |
+
|
| 1289 |
+
def test_nuiscance_columns():
|
| 1290 |
+
# GH 15015
|
| 1291 |
+
df = DataFrame(
|
| 1292 |
+
{
|
| 1293 |
+
"A": [1, 2, 3],
|
| 1294 |
+
"B": [1.0, 2.0, 3.0],
|
| 1295 |
+
"C": ["foo", "bar", "baz"],
|
| 1296 |
+
"D": date_range("20130101", periods=3),
|
| 1297 |
+
}
|
| 1298 |
+
)
|
| 1299 |
+
|
| 1300 |
+
result = df.agg("min")
|
| 1301 |
+
expected = Series([1, 1.0, "bar", Timestamp("20130101")], index=df.columns)
|
| 1302 |
+
tm.assert_series_equal(result, expected)
|
| 1303 |
+
|
| 1304 |
+
result = df.agg(["min"])
|
| 1305 |
+
expected = DataFrame(
|
| 1306 |
+
[[1, 1.0, "bar", Timestamp("20130101").as_unit("ns")]],
|
| 1307 |
+
index=["min"],
|
| 1308 |
+
columns=df.columns,
|
| 1309 |
+
)
|
| 1310 |
+
tm.assert_frame_equal(result, expected)
|
| 1311 |
+
|
| 1312 |
+
msg = "does not support reduction"
|
| 1313 |
+
with pytest.raises(TypeError, match=msg):
|
| 1314 |
+
df.agg("sum")
|
| 1315 |
+
|
| 1316 |
+
result = df[["A", "B", "C"]].agg("sum")
|
| 1317 |
+
expected = Series([6, 6.0, "foobarbaz"], index=["A", "B", "C"])
|
| 1318 |
+
tm.assert_series_equal(result, expected)
|
| 1319 |
+
|
| 1320 |
+
msg = "does not support reduction"
|
| 1321 |
+
with pytest.raises(TypeError, match=msg):
|
| 1322 |
+
df.agg(["sum"])
|
| 1323 |
+
|
| 1324 |
+
|
| 1325 |
+
@pytest.mark.parametrize("how", ["agg", "apply"])
|
| 1326 |
+
def test_non_callable_aggregates(how):
|
| 1327 |
+
# GH 16405
|
| 1328 |
+
# 'size' is a property of frame/series
|
| 1329 |
+
# validate that this is working
|
| 1330 |
+
# GH 39116 - expand to apply
|
| 1331 |
+
df = DataFrame(
|
| 1332 |
+
{"A": [None, 2, 3], "B": [1.0, np.nan, 3.0], "C": ["foo", None, "bar"]}
|
| 1333 |
+
)
|
| 1334 |
+
|
| 1335 |
+
# Function aggregate
|
| 1336 |
+
result = getattr(df, how)({"A": "count"})
|
| 1337 |
+
expected = Series({"A": 2})
|
| 1338 |
+
|
| 1339 |
+
tm.assert_series_equal(result, expected)
|
| 1340 |
+
|
| 1341 |
+
# Non-function aggregate
|
| 1342 |
+
result = getattr(df, how)({"A": "size"})
|
| 1343 |
+
expected = Series({"A": 3})
|
| 1344 |
+
|
| 1345 |
+
tm.assert_series_equal(result, expected)
|
| 1346 |
+
|
| 1347 |
+
# Mix function and non-function aggs
|
| 1348 |
+
result1 = getattr(df, how)(["count", "size"])
|
| 1349 |
+
result2 = getattr(df, how)(
|
| 1350 |
+
{"A": ["count", "size"], "B": ["count", "size"], "C": ["count", "size"]}
|
| 1351 |
+
)
|
| 1352 |
+
expected = DataFrame(
|
| 1353 |
+
{
|
| 1354 |
+
"A": {"count": 2, "size": 3},
|
| 1355 |
+
"B": {"count": 2, "size": 3},
|
| 1356 |
+
"C": {"count": 2, "size": 3},
|
| 1357 |
+
}
|
| 1358 |
+
)
|
| 1359 |
+
|
| 1360 |
+
tm.assert_frame_equal(result1, result2, check_like=True)
|
| 1361 |
+
tm.assert_frame_equal(result2, expected, check_like=True)
|
| 1362 |
+
|
| 1363 |
+
# Just functional string arg is same as calling df.arg()
|
| 1364 |
+
result = getattr(df, how)("count")
|
| 1365 |
+
expected = df.count()
|
| 1366 |
+
|
| 1367 |
+
tm.assert_series_equal(result, expected)
|
| 1368 |
+
|
| 1369 |
+
|
| 1370 |
+
@pytest.mark.parametrize("how", ["agg", "apply"])
|
| 1371 |
+
def test_size_as_str(how, axis):
|
| 1372 |
+
# GH 39934
|
| 1373 |
+
df = DataFrame(
|
| 1374 |
+
{"A": [None, 2, 3], "B": [1.0, np.nan, 3.0], "C": ["foo", None, "bar"]}
|
| 1375 |
+
)
|
| 1376 |
+
# Just a string attribute arg same as calling df.arg
|
| 1377 |
+
# on the columns
|
| 1378 |
+
result = getattr(df, how)("size", axis=axis)
|
| 1379 |
+
if axis in (0, "index"):
|
| 1380 |
+
expected = Series(df.shape[0], index=df.columns)
|
| 1381 |
+
else:
|
| 1382 |
+
expected = Series(df.shape[1], index=df.index)
|
| 1383 |
+
tm.assert_series_equal(result, expected)
|
| 1384 |
+
|
| 1385 |
+
|
| 1386 |
+
def test_agg_listlike_result():
|
| 1387 |
+
# GH-29587 user defined function returning list-likes
|
| 1388 |
+
df = DataFrame({"A": [2, 2, 3], "B": [1.5, np.nan, 1.5], "C": ["foo", None, "bar"]})
|
| 1389 |
+
|
| 1390 |
+
def func(group_col):
|
| 1391 |
+
return list(group_col.dropna().unique())
|
| 1392 |
+
|
| 1393 |
+
result = df.agg(func)
|
| 1394 |
+
expected = Series([[2, 3], [1.5], ["foo", "bar"]], index=["A", "B", "C"])
|
| 1395 |
+
tm.assert_series_equal(result, expected)
|
| 1396 |
+
|
| 1397 |
+
result = df.agg([func])
|
| 1398 |
+
expected = expected.to_frame("func").T
|
| 1399 |
+
tm.assert_frame_equal(result, expected)
|
| 1400 |
+
|
| 1401 |
+
|
| 1402 |
+
@pytest.mark.parametrize("axis", [0, 1])
|
| 1403 |
+
@pytest.mark.parametrize(
|
| 1404 |
+
"args, kwargs",
|
| 1405 |
+
[
|
| 1406 |
+
((1, 2, 3), {}),
|
| 1407 |
+
((8, 7, 15), {}),
|
| 1408 |
+
((1, 2), {}),
|
| 1409 |
+
((1,), {"b": 2}),
|
| 1410 |
+
((), {"a": 1, "b": 2}),
|
| 1411 |
+
((), {"a": 2, "b": 1}),
|
| 1412 |
+
((), {"a": 1, "b": 2, "c": 3}),
|
| 1413 |
+
],
|
| 1414 |
+
)
|
| 1415 |
+
def test_agg_args_kwargs(axis, args, kwargs):
|
| 1416 |
+
def f(x, a, b, c=3):
|
| 1417 |
+
return x.sum() + (a + b) / c
|
| 1418 |
+
|
| 1419 |
+
df = DataFrame([[1, 2], [3, 4]])
|
| 1420 |
+
|
| 1421 |
+
if axis == 0:
|
| 1422 |
+
expected = Series([5.0, 7.0])
|
| 1423 |
+
else:
|
| 1424 |
+
expected = Series([4.0, 8.0])
|
| 1425 |
+
|
| 1426 |
+
result = df.agg(f, axis, *args, **kwargs)
|
| 1427 |
+
|
| 1428 |
+
tm.assert_series_equal(result, expected)
|
| 1429 |
+
|
| 1430 |
+
|
| 1431 |
+
@pytest.mark.parametrize("num_cols", [2, 3, 5])
|
| 1432 |
+
def test_frequency_is_original(num_cols, engine, request):
|
| 1433 |
+
# GH 22150
|
| 1434 |
+
if engine == "numba":
|
| 1435 |
+
mark = pytest.mark.xfail(reason="numba engine only supports numeric indices")
|
| 1436 |
+
request.node.add_marker(mark)
|
| 1437 |
+
index = pd.DatetimeIndex(["1950-06-30", "1952-10-24", "1953-05-29"])
|
| 1438 |
+
original = index.copy()
|
| 1439 |
+
df = DataFrame(1, index=index, columns=range(num_cols))
|
| 1440 |
+
df.apply(lambda x: x, engine=engine)
|
| 1441 |
+
assert index.freq == original.freq
|
| 1442 |
+
|
| 1443 |
+
|
| 1444 |
+
def test_apply_datetime_tz_issue(engine, request):
|
| 1445 |
+
# GH 29052
|
| 1446 |
+
|
| 1447 |
+
if engine == "numba":
|
| 1448 |
+
mark = pytest.mark.xfail(
|
| 1449 |
+
reason="numba engine doesn't support non-numeric indexes"
|
| 1450 |
+
)
|
| 1451 |
+
request.node.add_marker(mark)
|
| 1452 |
+
|
| 1453 |
+
timestamps = [
|
| 1454 |
+
Timestamp("2019-03-15 12:34:31.909000+0000", tz="UTC"),
|
| 1455 |
+
Timestamp("2019-03-15 12:34:34.359000+0000", tz="UTC"),
|
| 1456 |
+
Timestamp("2019-03-15 12:34:34.660000+0000", tz="UTC"),
|
| 1457 |
+
]
|
| 1458 |
+
df = DataFrame(data=[0, 1, 2], index=timestamps)
|
| 1459 |
+
result = df.apply(lambda x: x.name, axis=1, engine=engine)
|
| 1460 |
+
expected = Series(index=timestamps, data=timestamps)
|
| 1461 |
+
|
| 1462 |
+
tm.assert_series_equal(result, expected)
|
| 1463 |
+
|
| 1464 |
+
|
| 1465 |
+
@pytest.mark.parametrize("df", [DataFrame({"A": ["a", None], "B": ["c", "d"]})])
|
| 1466 |
+
@pytest.mark.parametrize("method", ["min", "max", "sum"])
|
| 1467 |
+
def test_mixed_column_raises(df, method, using_infer_string):
|
| 1468 |
+
# GH 16832
|
| 1469 |
+
if method == "sum":
|
| 1470 |
+
msg = r'can only concatenate str \(not "int"\) to str|does not support'
|
| 1471 |
+
else:
|
| 1472 |
+
msg = "not supported between instances of 'str' and 'float'"
|
| 1473 |
+
if not using_infer_string:
|
| 1474 |
+
with pytest.raises(TypeError, match=msg):
|
| 1475 |
+
getattr(df, method)()
|
| 1476 |
+
else:
|
| 1477 |
+
getattr(df, method)()
|
| 1478 |
+
|
| 1479 |
+
|
| 1480 |
+
@pytest.mark.parametrize("col", [1, 1.0, True, "a", np.nan])
|
| 1481 |
+
def test_apply_dtype(col):
|
| 1482 |
+
# GH 31466
|
| 1483 |
+
df = DataFrame([[1.0, col]], columns=["a", "b"])
|
| 1484 |
+
result = df.apply(lambda x: x.dtype)
|
| 1485 |
+
expected = df.dtypes
|
| 1486 |
+
|
| 1487 |
+
tm.assert_series_equal(result, expected)
|
| 1488 |
+
|
| 1489 |
+
|
| 1490 |
+
def test_apply_mutating(using_array_manager, using_copy_on_write, warn_copy_on_write):
|
| 1491 |
+
# GH#35462 case where applied func pins a new BlockManager to a row
|
| 1492 |
+
df = DataFrame({"a": range(100), "b": range(100, 200)})
|
| 1493 |
+
df_orig = df.copy()
|
| 1494 |
+
|
| 1495 |
+
def func(row):
|
| 1496 |
+
mgr = row._mgr
|
| 1497 |
+
row.loc["a"] += 1
|
| 1498 |
+
assert row._mgr is not mgr
|
| 1499 |
+
return row
|
| 1500 |
+
|
| 1501 |
+
expected = df.copy()
|
| 1502 |
+
expected["a"] += 1
|
| 1503 |
+
|
| 1504 |
+
with tm.assert_cow_warning(warn_copy_on_write):
|
| 1505 |
+
result = df.apply(func, axis=1)
|
| 1506 |
+
|
| 1507 |
+
tm.assert_frame_equal(result, expected)
|
| 1508 |
+
if using_copy_on_write or using_array_manager:
|
| 1509 |
+
# INFO(CoW) With copy on write, mutating a viewing row doesn't mutate the parent
|
| 1510 |
+
# INFO(ArrayManager) With BlockManager, the row is a view and mutated in place,
|
| 1511 |
+
# with ArrayManager the row is not a view, and thus not mutated in place
|
| 1512 |
+
tm.assert_frame_equal(df, df_orig)
|
| 1513 |
+
else:
|
| 1514 |
+
tm.assert_frame_equal(df, result)
|
| 1515 |
+
|
| 1516 |
+
|
| 1517 |
+
def test_apply_empty_list_reduce():
|
| 1518 |
+
# GH#35683 get columns correct
|
| 1519 |
+
df = DataFrame([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]], columns=["a", "b"])
|
| 1520 |
+
|
| 1521 |
+
result = df.apply(lambda x: [], result_type="reduce")
|
| 1522 |
+
expected = Series({"a": [], "b": []}, dtype=object)
|
| 1523 |
+
tm.assert_series_equal(result, expected)
|
| 1524 |
+
|
| 1525 |
+
|
| 1526 |
+
def test_apply_no_suffix_index(engine, request):
|
| 1527 |
+
# GH36189
|
| 1528 |
+
if engine == "numba":
|
| 1529 |
+
mark = pytest.mark.xfail(
|
| 1530 |
+
reason="numba engine doesn't support list-likes/dict-like callables"
|
| 1531 |
+
)
|
| 1532 |
+
request.node.add_marker(mark)
|
| 1533 |
+
pdf = DataFrame([[4, 9]] * 3, columns=["A", "B"])
|
| 1534 |
+
result = pdf.apply(["sum", lambda x: x.sum(), lambda x: x.sum()], engine=engine)
|
| 1535 |
+
expected = DataFrame(
|
| 1536 |
+
{"A": [12, 12, 12], "B": [27, 27, 27]}, index=["sum", "<lambda>", "<lambda>"]
|
| 1537 |
+
)
|
| 1538 |
+
|
| 1539 |
+
tm.assert_frame_equal(result, expected)
|
| 1540 |
+
|
| 1541 |
+
|
| 1542 |
+
def test_apply_raw_returns_string(engine):
|
| 1543 |
+
# https://github.com/pandas-dev/pandas/issues/35940
|
| 1544 |
+
if engine == "numba":
|
| 1545 |
+
pytest.skip("No object dtype support in numba")
|
| 1546 |
+
df = DataFrame({"A": ["aa", "bbb"]})
|
| 1547 |
+
result = df.apply(lambda x: x[0], engine=engine, axis=1, raw=True)
|
| 1548 |
+
expected = Series(["aa", "bbb"])
|
| 1549 |
+
tm.assert_series_equal(result, expected)
|
| 1550 |
+
|
| 1551 |
+
|
| 1552 |
+
def test_aggregation_func_column_order():
|
| 1553 |
+
# GH40420: the result of .agg should have an index that is sorted
|
| 1554 |
+
# according to the arguments provided to agg.
|
| 1555 |
+
df = DataFrame(
|
| 1556 |
+
[
|
| 1557 |
+
(1, 0, 0),
|
| 1558 |
+
(2, 0, 0),
|
| 1559 |
+
(3, 0, 0),
|
| 1560 |
+
(4, 5, 4),
|
| 1561 |
+
(5, 6, 6),
|
| 1562 |
+
(6, 7, 7),
|
| 1563 |
+
],
|
| 1564 |
+
columns=("att1", "att2", "att3"),
|
| 1565 |
+
)
|
| 1566 |
+
|
| 1567 |
+
def sum_div2(s):
|
| 1568 |
+
return s.sum() / 2
|
| 1569 |
+
|
| 1570 |
+
aggs = ["sum", sum_div2, "count", "min"]
|
| 1571 |
+
result = df.agg(aggs)
|
| 1572 |
+
expected = DataFrame(
|
| 1573 |
+
{
|
| 1574 |
+
"att1": [21.0, 10.5, 6.0, 1.0],
|
| 1575 |
+
"att2": [18.0, 9.0, 6.0, 0.0],
|
| 1576 |
+
"att3": [17.0, 8.5, 6.0, 0.0],
|
| 1577 |
+
},
|
| 1578 |
+
index=["sum", "sum_div2", "count", "min"],
|
| 1579 |
+
)
|
| 1580 |
+
tm.assert_frame_equal(result, expected)
|
| 1581 |
+
|
| 1582 |
+
|
| 1583 |
+
def test_apply_getitem_axis_1(engine, request):
|
| 1584 |
+
# GH 13427
|
| 1585 |
+
if engine == "numba":
|
| 1586 |
+
mark = pytest.mark.xfail(
|
| 1587 |
+
reason="numba engine not supporting duplicate index values"
|
| 1588 |
+
)
|
| 1589 |
+
request.node.add_marker(mark)
|
| 1590 |
+
df = DataFrame({"a": [0, 1, 2], "b": [1, 2, 3]})
|
| 1591 |
+
result = df[["a", "a"]].apply(
|
| 1592 |
+
lambda x: x.iloc[0] + x.iloc[1], axis=1, engine=engine
|
| 1593 |
+
)
|
| 1594 |
+
expected = Series([0, 2, 4])
|
| 1595 |
+
tm.assert_series_equal(result, expected)
|
| 1596 |
+
|
| 1597 |
+
|
| 1598 |
+
def test_nuisance_depr_passes_through_warnings():
|
| 1599 |
+
# GH 43740
|
| 1600 |
+
# DataFrame.agg with list-likes may emit warnings for both individual
|
| 1601 |
+
# args and for entire columns, but we only want to emit once. We
|
| 1602 |
+
# catch and suppress the warnings for individual args, but need to make
|
| 1603 |
+
# sure if some other warnings were raised, they get passed through to
|
| 1604 |
+
# the user.
|
| 1605 |
+
|
| 1606 |
+
def expected_warning(x):
|
| 1607 |
+
warnings.warn("Hello, World!")
|
| 1608 |
+
return x.sum()
|
| 1609 |
+
|
| 1610 |
+
df = DataFrame({"a": [1, 2, 3]})
|
| 1611 |
+
with tm.assert_produces_warning(UserWarning, match="Hello, World!"):
|
| 1612 |
+
df.agg([expected_warning])
|
| 1613 |
+
|
| 1614 |
+
|
| 1615 |
+
def test_apply_type():
|
| 1616 |
+
# GH 46719
|
| 1617 |
+
df = DataFrame(
|
| 1618 |
+
{"col1": [3, "string", float], "col2": [0.25, datetime(2020, 1, 1), np.nan]},
|
| 1619 |
+
index=["a", "b", "c"],
|
| 1620 |
+
)
|
| 1621 |
+
|
| 1622 |
+
# axis=0
|
| 1623 |
+
result = df.apply(type, axis=0)
|
| 1624 |
+
expected = Series({"col1": Series, "col2": Series})
|
| 1625 |
+
tm.assert_series_equal(result, expected)
|
| 1626 |
+
|
| 1627 |
+
# axis=1
|
| 1628 |
+
result = df.apply(type, axis=1)
|
| 1629 |
+
expected = Series({"a": Series, "b": Series, "c": Series})
|
| 1630 |
+
tm.assert_series_equal(result, expected)
|
| 1631 |
+
|
| 1632 |
+
|
| 1633 |
+
def test_apply_on_empty_dataframe(engine):
|
| 1634 |
+
# GH 39111
|
| 1635 |
+
df = DataFrame({"a": [1, 2], "b": [3, 0]})
|
| 1636 |
+
result = df.head(0).apply(lambda x: max(x["a"], x["b"]), axis=1, engine=engine)
|
| 1637 |
+
expected = Series([], dtype=np.float64)
|
| 1638 |
+
tm.assert_series_equal(result, expected)
|
| 1639 |
+
|
| 1640 |
+
|
| 1641 |
+
def test_apply_return_list():
|
| 1642 |
+
df = DataFrame({"a": [1, 2], "b": [2, 3]})
|
| 1643 |
+
result = df.apply(lambda x: [x.values])
|
| 1644 |
+
expected = DataFrame({"a": [[1, 2]], "b": [[2, 3]]})
|
| 1645 |
+
tm.assert_frame_equal(result, expected)
|
| 1646 |
+
|
| 1647 |
+
|
| 1648 |
+
@pytest.mark.parametrize(
|
| 1649 |
+
"test, constant",
|
| 1650 |
+
[
|
| 1651 |
+
({"a": [1, 2, 3], "b": [1, 1, 1]}, {"a": [1, 2, 3], "b": [1]}),
|
| 1652 |
+
({"a": [2, 2, 2], "b": [1, 1, 1]}, {"a": [2], "b": [1]}),
|
| 1653 |
+
],
|
| 1654 |
+
)
|
| 1655 |
+
def test_unique_agg_type_is_series(test, constant):
|
| 1656 |
+
# GH#22558
|
| 1657 |
+
df1 = DataFrame(test)
|
| 1658 |
+
expected = Series(data=constant, index=["a", "b"], dtype="object")
|
| 1659 |
+
aggregation = {"a": "unique", "b": "unique"}
|
| 1660 |
+
|
| 1661 |
+
result = df1.agg(aggregation)
|
| 1662 |
+
|
| 1663 |
+
tm.assert_series_equal(result, expected)
|
| 1664 |
+
|
| 1665 |
+
|
| 1666 |
+
def test_any_apply_keyword_non_zero_axis_regression():
|
| 1667 |
+
# https://github.com/pandas-dev/pandas/issues/48656
|
| 1668 |
+
df = DataFrame({"A": [1, 2, 0], "B": [0, 2, 0], "C": [0, 0, 0]})
|
| 1669 |
+
expected = Series([True, True, False])
|
| 1670 |
+
tm.assert_series_equal(df.any(axis=1), expected)
|
| 1671 |
+
|
| 1672 |
+
result = df.apply("any", axis=1)
|
| 1673 |
+
tm.assert_series_equal(result, expected)
|
| 1674 |
+
|
| 1675 |
+
result = df.apply("any", 1)
|
| 1676 |
+
tm.assert_series_equal(result, expected)
|
| 1677 |
+
|
| 1678 |
+
|
| 1679 |
+
def test_agg_mapping_func_deprecated():
|
| 1680 |
+
# GH 53325
|
| 1681 |
+
df = DataFrame({"x": [1, 2, 3]})
|
| 1682 |
+
|
| 1683 |
+
def foo1(x, a=1, c=0):
|
| 1684 |
+
return x + a + c
|
| 1685 |
+
|
| 1686 |
+
def foo2(x, b=2, c=0):
|
| 1687 |
+
return x + b + c
|
| 1688 |
+
|
| 1689 |
+
# single func already takes the vectorized path
|
| 1690 |
+
result = df.agg(foo1, 0, 3, c=4)
|
| 1691 |
+
expected = df + 7
|
| 1692 |
+
tm.assert_frame_equal(result, expected)
|
| 1693 |
+
|
| 1694 |
+
msg = "using .+ in Series.agg cannot aggregate and"
|
| 1695 |
+
|
| 1696 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 1697 |
+
result = df.agg([foo1, foo2], 0, 3, c=4)
|
| 1698 |
+
expected = DataFrame(
|
| 1699 |
+
[[8, 8], [9, 9], [10, 10]], columns=[["x", "x"], ["foo1", "foo2"]]
|
| 1700 |
+
)
|
| 1701 |
+
tm.assert_frame_equal(result, expected)
|
| 1702 |
+
|
| 1703 |
+
# TODO: the result below is wrong, should be fixed (GH53325)
|
| 1704 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 1705 |
+
result = df.agg({"x": foo1}, 0, 3, c=4)
|
| 1706 |
+
expected = DataFrame([2, 3, 4], columns=["x"])
|
| 1707 |
+
tm.assert_frame_equal(result, expected)
|
| 1708 |
+
|
| 1709 |
+
|
| 1710 |
+
def test_agg_std():
|
| 1711 |
+
df = DataFrame(np.arange(6).reshape(3, 2), columns=["A", "B"])
|
| 1712 |
+
|
| 1713 |
+
with tm.assert_produces_warning(FutureWarning, match="using DataFrame.std"):
|
| 1714 |
+
result = df.agg(np.std)
|
| 1715 |
+
expected = Series({"A": 2.0, "B": 2.0}, dtype=float)
|
| 1716 |
+
tm.assert_series_equal(result, expected)
|
| 1717 |
+
|
| 1718 |
+
with tm.assert_produces_warning(FutureWarning, match="using Series.std"):
|
| 1719 |
+
result = df.agg([np.std])
|
| 1720 |
+
expected = DataFrame({"A": 2.0, "B": 2.0}, index=["std"])
|
| 1721 |
+
tm.assert_frame_equal(result, expected)
|
| 1722 |
+
|
| 1723 |
+
|
| 1724 |
+
def test_agg_dist_like_and_nonunique_columns():
|
| 1725 |
+
# GH#51099
|
| 1726 |
+
df = DataFrame(
|
| 1727 |
+
{"A": [None, 2, 3], "B": [1.0, np.nan, 3.0], "C": ["foo", None, "bar"]}
|
| 1728 |
+
)
|
| 1729 |
+
df.columns = ["A", "A", "C"]
|
| 1730 |
+
|
| 1731 |
+
result = df.agg({"A": "count"})
|
| 1732 |
+
expected = df["A"].count()
|
| 1733 |
+
tm.assert_series_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply_relabeling.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
from pandas.compat.numpy import np_version_gte1p25
|
| 5 |
+
|
| 6 |
+
import pandas as pd
|
| 7 |
+
import pandas._testing as tm
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def test_agg_relabel():
|
| 11 |
+
# GH 26513
|
| 12 |
+
df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4], "C": [3, 4, 5, 6]})
|
| 13 |
+
|
| 14 |
+
# simplest case with one column, one func
|
| 15 |
+
result = df.agg(foo=("B", "sum"))
|
| 16 |
+
expected = pd.DataFrame({"B": [10]}, index=pd.Index(["foo"]))
|
| 17 |
+
tm.assert_frame_equal(result, expected)
|
| 18 |
+
|
| 19 |
+
# test on same column with different methods
|
| 20 |
+
result = df.agg(foo=("B", "sum"), bar=("B", "min"))
|
| 21 |
+
expected = pd.DataFrame({"B": [10, 1]}, index=pd.Index(["foo", "bar"]))
|
| 22 |
+
|
| 23 |
+
tm.assert_frame_equal(result, expected)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def test_agg_relabel_multi_columns_multi_methods():
|
| 27 |
+
# GH 26513, test on multiple columns with multiple methods
|
| 28 |
+
df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4], "C": [3, 4, 5, 6]})
|
| 29 |
+
result = df.agg(
|
| 30 |
+
foo=("A", "sum"),
|
| 31 |
+
bar=("B", "mean"),
|
| 32 |
+
cat=("A", "min"),
|
| 33 |
+
dat=("B", "max"),
|
| 34 |
+
f=("A", "max"),
|
| 35 |
+
g=("C", "min"),
|
| 36 |
+
)
|
| 37 |
+
expected = pd.DataFrame(
|
| 38 |
+
{
|
| 39 |
+
"A": [6.0, np.nan, 1.0, np.nan, 2.0, np.nan],
|
| 40 |
+
"B": [np.nan, 2.5, np.nan, 4.0, np.nan, np.nan],
|
| 41 |
+
"C": [np.nan, np.nan, np.nan, np.nan, np.nan, 3.0],
|
| 42 |
+
},
|
| 43 |
+
index=pd.Index(["foo", "bar", "cat", "dat", "f", "g"]),
|
| 44 |
+
)
|
| 45 |
+
tm.assert_frame_equal(result, expected)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@pytest.mark.xfail(np_version_gte1p25, reason="name of min now equals name of np.min")
|
| 49 |
+
def test_agg_relabel_partial_functions():
|
| 50 |
+
# GH 26513, test on partial, functools or more complex cases
|
| 51 |
+
df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4], "C": [3, 4, 5, 6]})
|
| 52 |
+
msg = "using Series.[mean|min]"
|
| 53 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 54 |
+
result = df.agg(foo=("A", np.mean), bar=("A", "mean"), cat=("A", min))
|
| 55 |
+
expected = pd.DataFrame(
|
| 56 |
+
{"A": [1.5, 1.5, 1.0]}, index=pd.Index(["foo", "bar", "cat"])
|
| 57 |
+
)
|
| 58 |
+
tm.assert_frame_equal(result, expected)
|
| 59 |
+
|
| 60 |
+
msg = "using Series.[mean|min|max|sum]"
|
| 61 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 62 |
+
result = df.agg(
|
| 63 |
+
foo=("A", min),
|
| 64 |
+
bar=("A", np.min),
|
| 65 |
+
cat=("B", max),
|
| 66 |
+
dat=("C", "min"),
|
| 67 |
+
f=("B", np.sum),
|
| 68 |
+
kk=("B", lambda x: min(x)),
|
| 69 |
+
)
|
| 70 |
+
expected = pd.DataFrame(
|
| 71 |
+
{
|
| 72 |
+
"A": [1.0, 1.0, np.nan, np.nan, np.nan, np.nan],
|
| 73 |
+
"B": [np.nan, np.nan, 4.0, np.nan, 10.0, 1.0],
|
| 74 |
+
"C": [np.nan, np.nan, np.nan, 3.0, np.nan, np.nan],
|
| 75 |
+
},
|
| 76 |
+
index=pd.Index(["foo", "bar", "cat", "dat", "f", "kk"]),
|
| 77 |
+
)
|
| 78 |
+
tm.assert_frame_equal(result, expected)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def test_agg_namedtuple():
|
| 82 |
+
# GH 26513
|
| 83 |
+
df = pd.DataFrame({"A": [0, 1], "B": [1, 2]})
|
| 84 |
+
result = df.agg(
|
| 85 |
+
foo=pd.NamedAgg("B", "sum"),
|
| 86 |
+
bar=pd.NamedAgg("B", "min"),
|
| 87 |
+
cat=pd.NamedAgg(column="B", aggfunc="count"),
|
| 88 |
+
fft=pd.NamedAgg("B", aggfunc="max"),
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
expected = pd.DataFrame(
|
| 92 |
+
{"B": [3, 1, 2, 2]}, index=pd.Index(["foo", "bar", "cat", "fft"])
|
| 93 |
+
)
|
| 94 |
+
tm.assert_frame_equal(result, expected)
|
| 95 |
+
|
| 96 |
+
result = df.agg(
|
| 97 |
+
foo=pd.NamedAgg("A", "min"),
|
| 98 |
+
bar=pd.NamedAgg(column="B", aggfunc="max"),
|
| 99 |
+
cat=pd.NamedAgg(column="A", aggfunc="max"),
|
| 100 |
+
)
|
| 101 |
+
expected = pd.DataFrame(
|
| 102 |
+
{"A": [0.0, np.nan, 1.0], "B": [np.nan, 2.0, np.nan]},
|
| 103 |
+
index=pd.Index(["foo", "bar", "cat"]),
|
| 104 |
+
)
|
| 105 |
+
tm.assert_frame_equal(result, expected)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def test_reconstruct_func():
|
| 109 |
+
# GH 28472, test to ensure reconstruct_func isn't moved;
|
| 110 |
+
# This method is used by other libraries (e.g. dask)
|
| 111 |
+
result = pd.core.apply.reconstruct_func("min")
|
| 112 |
+
expected = (False, "min", None, None)
|
| 113 |
+
tm.assert_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_frame_transform.py
ADDED
|
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
from pandas import (
|
| 5 |
+
DataFrame,
|
| 6 |
+
MultiIndex,
|
| 7 |
+
Series,
|
| 8 |
+
)
|
| 9 |
+
import pandas._testing as tm
|
| 10 |
+
from pandas.tests.apply.common import frame_transform_kernels
|
| 11 |
+
from pandas.tests.frame.common import zip_frames
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def unpack_obj(obj, klass, axis):
|
| 15 |
+
"""
|
| 16 |
+
Helper to ensure we have the right type of object for a test parametrized
|
| 17 |
+
over frame_or_series.
|
| 18 |
+
"""
|
| 19 |
+
if klass is not DataFrame:
|
| 20 |
+
obj = obj["A"]
|
| 21 |
+
if axis != 0:
|
| 22 |
+
pytest.skip(f"Test is only for DataFrame with axis={axis}")
|
| 23 |
+
return obj
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def test_transform_ufunc(axis, float_frame, frame_or_series):
|
| 27 |
+
# GH 35964
|
| 28 |
+
obj = unpack_obj(float_frame, frame_or_series, axis)
|
| 29 |
+
|
| 30 |
+
with np.errstate(all="ignore"):
|
| 31 |
+
f_sqrt = np.sqrt(obj)
|
| 32 |
+
|
| 33 |
+
# ufunc
|
| 34 |
+
result = obj.transform(np.sqrt, axis=axis)
|
| 35 |
+
expected = f_sqrt
|
| 36 |
+
tm.assert_equal(result, expected)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@pytest.mark.parametrize(
|
| 40 |
+
"ops, names",
|
| 41 |
+
[
|
| 42 |
+
([np.sqrt], ["sqrt"]),
|
| 43 |
+
([np.abs, np.sqrt], ["absolute", "sqrt"]),
|
| 44 |
+
(np.array([np.sqrt]), ["sqrt"]),
|
| 45 |
+
(np.array([np.abs, np.sqrt]), ["absolute", "sqrt"]),
|
| 46 |
+
],
|
| 47 |
+
)
|
| 48 |
+
def test_transform_listlike(axis, float_frame, ops, names):
|
| 49 |
+
# GH 35964
|
| 50 |
+
other_axis = 1 if axis in {0, "index"} else 0
|
| 51 |
+
with np.errstate(all="ignore"):
|
| 52 |
+
expected = zip_frames([op(float_frame) for op in ops], axis=other_axis)
|
| 53 |
+
if axis in {0, "index"}:
|
| 54 |
+
expected.columns = MultiIndex.from_product([float_frame.columns, names])
|
| 55 |
+
else:
|
| 56 |
+
expected.index = MultiIndex.from_product([float_frame.index, names])
|
| 57 |
+
result = float_frame.transform(ops, axis=axis)
|
| 58 |
+
tm.assert_frame_equal(result, expected)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@pytest.mark.parametrize("ops", [[], np.array([])])
|
| 62 |
+
def test_transform_empty_listlike(float_frame, ops, frame_or_series):
|
| 63 |
+
obj = unpack_obj(float_frame, frame_or_series, 0)
|
| 64 |
+
|
| 65 |
+
with pytest.raises(ValueError, match="No transform functions were provided"):
|
| 66 |
+
obj.transform(ops)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def test_transform_listlike_func_with_args():
|
| 70 |
+
# GH 50624
|
| 71 |
+
df = DataFrame({"x": [1, 2, 3]})
|
| 72 |
+
|
| 73 |
+
def foo1(x, a=1, c=0):
|
| 74 |
+
return x + a + c
|
| 75 |
+
|
| 76 |
+
def foo2(x, b=2, c=0):
|
| 77 |
+
return x + b + c
|
| 78 |
+
|
| 79 |
+
msg = r"foo1\(\) got an unexpected keyword argument 'b'"
|
| 80 |
+
with pytest.raises(TypeError, match=msg):
|
| 81 |
+
df.transform([foo1, foo2], 0, 3, b=3, c=4)
|
| 82 |
+
|
| 83 |
+
result = df.transform([foo1, foo2], 0, 3, c=4)
|
| 84 |
+
expected = DataFrame(
|
| 85 |
+
[[8, 8], [9, 9], [10, 10]],
|
| 86 |
+
columns=MultiIndex.from_tuples([("x", "foo1"), ("x", "foo2")]),
|
| 87 |
+
)
|
| 88 |
+
tm.assert_frame_equal(result, expected)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
@pytest.mark.parametrize("box", [dict, Series])
|
| 92 |
+
def test_transform_dictlike(axis, float_frame, box):
|
| 93 |
+
# GH 35964
|
| 94 |
+
if axis in (0, "index"):
|
| 95 |
+
e = float_frame.columns[0]
|
| 96 |
+
expected = float_frame[[e]].transform(np.abs)
|
| 97 |
+
else:
|
| 98 |
+
e = float_frame.index[0]
|
| 99 |
+
expected = float_frame.iloc[[0]].transform(np.abs)
|
| 100 |
+
result = float_frame.transform(box({e: np.abs}), axis=axis)
|
| 101 |
+
tm.assert_frame_equal(result, expected)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def test_transform_dictlike_mixed():
|
| 105 |
+
# GH 40018 - mix of lists and non-lists in values of a dictionary
|
| 106 |
+
df = DataFrame({"a": [1, 2], "b": [1, 4], "c": [1, 4]})
|
| 107 |
+
result = df.transform({"b": ["sqrt", "abs"], "c": "sqrt"})
|
| 108 |
+
expected = DataFrame(
|
| 109 |
+
[[1.0, 1, 1.0], [2.0, 4, 2.0]],
|
| 110 |
+
columns=MultiIndex([("b", "c"), ("sqrt", "abs")], [(0, 0, 1), (0, 1, 0)]),
|
| 111 |
+
)
|
| 112 |
+
tm.assert_frame_equal(result, expected)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@pytest.mark.parametrize(
|
| 116 |
+
"ops",
|
| 117 |
+
[
|
| 118 |
+
{},
|
| 119 |
+
{"A": []},
|
| 120 |
+
{"A": [], "B": "cumsum"},
|
| 121 |
+
{"A": "cumsum", "B": []},
|
| 122 |
+
{"A": [], "B": ["cumsum"]},
|
| 123 |
+
{"A": ["cumsum"], "B": []},
|
| 124 |
+
],
|
| 125 |
+
)
|
| 126 |
+
def test_transform_empty_dictlike(float_frame, ops, frame_or_series):
|
| 127 |
+
obj = unpack_obj(float_frame, frame_or_series, 0)
|
| 128 |
+
|
| 129 |
+
with pytest.raises(ValueError, match="No transform functions were provided"):
|
| 130 |
+
obj.transform(ops)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
@pytest.mark.parametrize("use_apply", [True, False])
|
| 134 |
+
def test_transform_udf(axis, float_frame, use_apply, frame_or_series):
|
| 135 |
+
# GH 35964
|
| 136 |
+
obj = unpack_obj(float_frame, frame_or_series, axis)
|
| 137 |
+
|
| 138 |
+
# transform uses UDF either via apply or passing the entire DataFrame
|
| 139 |
+
def func(x):
|
| 140 |
+
# transform is using apply iff x is not a DataFrame
|
| 141 |
+
if use_apply == isinstance(x, frame_or_series):
|
| 142 |
+
# Force transform to fallback
|
| 143 |
+
raise ValueError
|
| 144 |
+
return x + 1
|
| 145 |
+
|
| 146 |
+
result = obj.transform(func, axis=axis)
|
| 147 |
+
expected = obj + 1
|
| 148 |
+
tm.assert_equal(result, expected)
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
wont_fail = ["ffill", "bfill", "fillna", "pad", "backfill", "shift"]
|
| 152 |
+
frame_kernels_raise = [x for x in frame_transform_kernels if x not in wont_fail]
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
@pytest.mark.parametrize("op", [*frame_kernels_raise, lambda x: x + 1])
|
| 156 |
+
def test_transform_bad_dtype(op, frame_or_series, request):
|
| 157 |
+
# GH 35964
|
| 158 |
+
if op == "ngroup":
|
| 159 |
+
request.applymarker(
|
| 160 |
+
pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
obj = DataFrame({"A": 3 * [object]}) # DataFrame that will fail on most transforms
|
| 164 |
+
obj = tm.get_obj(obj, frame_or_series)
|
| 165 |
+
error = TypeError
|
| 166 |
+
msg = "|".join(
|
| 167 |
+
[
|
| 168 |
+
"not supported between instances of 'type' and 'type'",
|
| 169 |
+
"unsupported operand type",
|
| 170 |
+
]
|
| 171 |
+
)
|
| 172 |
+
|
| 173 |
+
with pytest.raises(error, match=msg):
|
| 174 |
+
obj.transform(op)
|
| 175 |
+
with pytest.raises(error, match=msg):
|
| 176 |
+
obj.transform([op])
|
| 177 |
+
with pytest.raises(error, match=msg):
|
| 178 |
+
obj.transform({"A": op})
|
| 179 |
+
with pytest.raises(error, match=msg):
|
| 180 |
+
obj.transform({"A": [op]})
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
@pytest.mark.parametrize("op", frame_kernels_raise)
|
| 184 |
+
def test_transform_failure_typeerror(request, op):
|
| 185 |
+
# GH 35964
|
| 186 |
+
|
| 187 |
+
if op == "ngroup":
|
| 188 |
+
request.applymarker(
|
| 189 |
+
pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
|
| 190 |
+
)
|
| 191 |
+
|
| 192 |
+
# Using object makes most transform kernels fail
|
| 193 |
+
df = DataFrame({"A": 3 * [object], "B": [1, 2, 3]})
|
| 194 |
+
error = TypeError
|
| 195 |
+
msg = "|".join(
|
| 196 |
+
[
|
| 197 |
+
"not supported between instances of 'type' and 'type'",
|
| 198 |
+
"unsupported operand type",
|
| 199 |
+
]
|
| 200 |
+
)
|
| 201 |
+
|
| 202 |
+
with pytest.raises(error, match=msg):
|
| 203 |
+
df.transform([op])
|
| 204 |
+
|
| 205 |
+
with pytest.raises(error, match=msg):
|
| 206 |
+
df.transform({"A": op, "B": op})
|
| 207 |
+
|
| 208 |
+
with pytest.raises(error, match=msg):
|
| 209 |
+
df.transform({"A": [op], "B": [op]})
|
| 210 |
+
|
| 211 |
+
with pytest.raises(error, match=msg):
|
| 212 |
+
df.transform({"A": [op, "shift"], "B": [op]})
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def test_transform_failure_valueerror():
|
| 216 |
+
# GH 40211
|
| 217 |
+
def op(x):
|
| 218 |
+
if np.sum(np.sum(x)) < 10:
|
| 219 |
+
raise ValueError
|
| 220 |
+
return x
|
| 221 |
+
|
| 222 |
+
df = DataFrame({"A": [1, 2, 3], "B": [400, 500, 600]})
|
| 223 |
+
msg = "Transform function failed"
|
| 224 |
+
|
| 225 |
+
with pytest.raises(ValueError, match=msg):
|
| 226 |
+
df.transform([op])
|
| 227 |
+
|
| 228 |
+
with pytest.raises(ValueError, match=msg):
|
| 229 |
+
df.transform({"A": op, "B": op})
|
| 230 |
+
|
| 231 |
+
with pytest.raises(ValueError, match=msg):
|
| 232 |
+
df.transform({"A": [op], "B": [op]})
|
| 233 |
+
|
| 234 |
+
with pytest.raises(ValueError, match=msg):
|
| 235 |
+
df.transform({"A": [op, "shift"], "B": [op]})
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
@pytest.mark.parametrize("use_apply", [True, False])
|
| 239 |
+
def test_transform_passes_args(use_apply, frame_or_series):
|
| 240 |
+
# GH 35964
|
| 241 |
+
# transform uses UDF either via apply or passing the entire DataFrame
|
| 242 |
+
expected_args = [1, 2]
|
| 243 |
+
expected_kwargs = {"c": 3}
|
| 244 |
+
|
| 245 |
+
def f(x, a, b, c):
|
| 246 |
+
# transform is using apply iff x is not a DataFrame
|
| 247 |
+
if use_apply == isinstance(x, frame_or_series):
|
| 248 |
+
# Force transform to fallback
|
| 249 |
+
raise ValueError
|
| 250 |
+
assert [a, b] == expected_args
|
| 251 |
+
assert c == expected_kwargs["c"]
|
| 252 |
+
return x
|
| 253 |
+
|
| 254 |
+
frame_or_series([1]).transform(f, 0, *expected_args, **expected_kwargs)
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
def test_transform_empty_dataframe():
|
| 258 |
+
# https://github.com/pandas-dev/pandas/issues/39636
|
| 259 |
+
df = DataFrame([], columns=["col1", "col2"])
|
| 260 |
+
result = df.transform(lambda x: x + 10)
|
| 261 |
+
tm.assert_frame_equal(result, df)
|
| 262 |
+
|
| 263 |
+
result = df["col1"].transform(lambda x: x + 10)
|
| 264 |
+
tm.assert_series_equal(result, df["col1"])
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_invalid_arg.py
ADDED
|
@@ -0,0 +1,361 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Tests specifically aimed at detecting bad arguments.
|
| 2 |
+
# This file is organized by reason for exception.
|
| 3 |
+
# 1. always invalid argument values
|
| 4 |
+
# 2. missing column(s)
|
| 5 |
+
# 3. incompatible ops/dtype/args/kwargs
|
| 6 |
+
# 4. invalid result shape/type
|
| 7 |
+
# If your test does not fit into one of these categories, add to this list.
|
| 8 |
+
|
| 9 |
+
from itertools import chain
|
| 10 |
+
import re
|
| 11 |
+
|
| 12 |
+
import numpy as np
|
| 13 |
+
import pytest
|
| 14 |
+
|
| 15 |
+
from pandas.errors import SpecificationError
|
| 16 |
+
|
| 17 |
+
from pandas import (
|
| 18 |
+
DataFrame,
|
| 19 |
+
Series,
|
| 20 |
+
date_range,
|
| 21 |
+
)
|
| 22 |
+
import pandas._testing as tm
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@pytest.mark.parametrize("result_type", ["foo", 1])
|
| 26 |
+
def test_result_type_error(result_type):
|
| 27 |
+
# allowed result_type
|
| 28 |
+
df = DataFrame(
|
| 29 |
+
np.tile(np.arange(3, dtype="int64"), 6).reshape(6, -1) + 1,
|
| 30 |
+
columns=["A", "B", "C"],
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
msg = (
|
| 34 |
+
"invalid value for result_type, must be one of "
|
| 35 |
+
"{None, 'reduce', 'broadcast', 'expand'}"
|
| 36 |
+
)
|
| 37 |
+
with pytest.raises(ValueError, match=msg):
|
| 38 |
+
df.apply(lambda x: [1, 2, 3], axis=1, result_type=result_type)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def test_apply_invalid_axis_value():
|
| 42 |
+
df = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]], index=["a", "a", "c"])
|
| 43 |
+
msg = "No axis named 2 for object type DataFrame"
|
| 44 |
+
with pytest.raises(ValueError, match=msg):
|
| 45 |
+
df.apply(lambda x: x, 2)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def test_agg_raises():
|
| 49 |
+
# GH 26513
|
| 50 |
+
df = DataFrame({"A": [0, 1], "B": [1, 2]})
|
| 51 |
+
msg = "Must provide"
|
| 52 |
+
|
| 53 |
+
with pytest.raises(TypeError, match=msg):
|
| 54 |
+
df.agg()
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def test_map_with_invalid_na_action_raises():
|
| 58 |
+
# https://github.com/pandas-dev/pandas/issues/32815
|
| 59 |
+
s = Series([1, 2, 3])
|
| 60 |
+
msg = "na_action must either be 'ignore' or None"
|
| 61 |
+
with pytest.raises(ValueError, match=msg):
|
| 62 |
+
s.map(lambda x: x, na_action="____")
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
@pytest.mark.parametrize("input_na_action", ["____", True])
|
| 66 |
+
def test_map_arg_is_dict_with_invalid_na_action_raises(input_na_action):
|
| 67 |
+
# https://github.com/pandas-dev/pandas/issues/46588
|
| 68 |
+
s = Series([1, 2, 3])
|
| 69 |
+
msg = f"na_action must either be 'ignore' or None, {input_na_action} was passed"
|
| 70 |
+
with pytest.raises(ValueError, match=msg):
|
| 71 |
+
s.map({1: 2}, na_action=input_na_action)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
@pytest.mark.parametrize("method", ["apply", "agg", "transform"])
|
| 75 |
+
@pytest.mark.parametrize("func", [{"A": {"B": "sum"}}, {"A": {"B": ["sum"]}}])
|
| 76 |
+
def test_nested_renamer(frame_or_series, method, func):
|
| 77 |
+
# GH 35964
|
| 78 |
+
obj = frame_or_series({"A": [1]})
|
| 79 |
+
match = "nested renamer is not supported"
|
| 80 |
+
with pytest.raises(SpecificationError, match=match):
|
| 81 |
+
getattr(obj, method)(func)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
@pytest.mark.parametrize(
|
| 85 |
+
"renamer",
|
| 86 |
+
[{"foo": ["min", "max"]}, {"foo": ["min", "max"], "bar": ["sum", "mean"]}],
|
| 87 |
+
)
|
| 88 |
+
def test_series_nested_renamer(renamer):
|
| 89 |
+
s = Series(range(6), dtype="int64", name="series")
|
| 90 |
+
msg = "nested renamer is not supported"
|
| 91 |
+
with pytest.raises(SpecificationError, match=msg):
|
| 92 |
+
s.agg(renamer)
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
def test_apply_dict_depr():
|
| 96 |
+
tsdf = DataFrame(
|
| 97 |
+
np.random.default_rng(2).standard_normal((10, 3)),
|
| 98 |
+
columns=["A", "B", "C"],
|
| 99 |
+
index=date_range("1/1/2000", periods=10),
|
| 100 |
+
)
|
| 101 |
+
msg = "nested renamer is not supported"
|
| 102 |
+
with pytest.raises(SpecificationError, match=msg):
|
| 103 |
+
tsdf.A.agg({"foo": ["sum", "mean"]})
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
@pytest.mark.parametrize("method", ["agg", "transform"])
|
| 107 |
+
def test_dict_nested_renaming_depr(method):
|
| 108 |
+
df = DataFrame({"A": range(5), "B": 5})
|
| 109 |
+
|
| 110 |
+
# nested renaming
|
| 111 |
+
msg = r"nested renamer is not supported"
|
| 112 |
+
with pytest.raises(SpecificationError, match=msg):
|
| 113 |
+
getattr(df, method)({"A": {"foo": "min"}, "B": {"bar": "max"}})
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
@pytest.mark.parametrize("method", ["apply", "agg", "transform"])
|
| 117 |
+
@pytest.mark.parametrize("func", [{"B": "sum"}, {"B": ["sum"]}])
|
| 118 |
+
def test_missing_column(method, func):
|
| 119 |
+
# GH 40004
|
| 120 |
+
obj = DataFrame({"A": [1]})
|
| 121 |
+
match = re.escape("Column(s) ['B'] do not exist")
|
| 122 |
+
with pytest.raises(KeyError, match=match):
|
| 123 |
+
getattr(obj, method)(func)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def test_transform_mixed_column_name_dtypes():
|
| 127 |
+
# GH39025
|
| 128 |
+
df = DataFrame({"a": ["1"]})
|
| 129 |
+
msg = r"Column\(s\) \[1, 'b'\] do not exist"
|
| 130 |
+
with pytest.raises(KeyError, match=msg):
|
| 131 |
+
df.transform({"a": int, 1: str, "b": int})
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
@pytest.mark.parametrize(
|
| 135 |
+
"how, args", [("pct_change", ()), ("nsmallest", (1, ["a", "b"])), ("tail", 1)]
|
| 136 |
+
)
|
| 137 |
+
def test_apply_str_axis_1_raises(how, args):
|
| 138 |
+
# GH 39211 - some ops don't support axis=1
|
| 139 |
+
df = DataFrame({"a": [1, 2], "b": [3, 4]})
|
| 140 |
+
msg = f"Operation {how} does not support axis=1"
|
| 141 |
+
with pytest.raises(ValueError, match=msg):
|
| 142 |
+
df.apply(how, axis=1, args=args)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def test_transform_axis_1_raises():
|
| 146 |
+
# GH 35964
|
| 147 |
+
msg = "No axis named 1 for object type Series"
|
| 148 |
+
with pytest.raises(ValueError, match=msg):
|
| 149 |
+
Series([1]).transform("sum", axis=1)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def test_apply_modify_traceback():
|
| 153 |
+
data = DataFrame(
|
| 154 |
+
{
|
| 155 |
+
"A": [
|
| 156 |
+
"foo",
|
| 157 |
+
"foo",
|
| 158 |
+
"foo",
|
| 159 |
+
"foo",
|
| 160 |
+
"bar",
|
| 161 |
+
"bar",
|
| 162 |
+
"bar",
|
| 163 |
+
"bar",
|
| 164 |
+
"foo",
|
| 165 |
+
"foo",
|
| 166 |
+
"foo",
|
| 167 |
+
],
|
| 168 |
+
"B": [
|
| 169 |
+
"one",
|
| 170 |
+
"one",
|
| 171 |
+
"one",
|
| 172 |
+
"two",
|
| 173 |
+
"one",
|
| 174 |
+
"one",
|
| 175 |
+
"one",
|
| 176 |
+
"two",
|
| 177 |
+
"two",
|
| 178 |
+
"two",
|
| 179 |
+
"one",
|
| 180 |
+
],
|
| 181 |
+
"C": [
|
| 182 |
+
"dull",
|
| 183 |
+
"dull",
|
| 184 |
+
"shiny",
|
| 185 |
+
"dull",
|
| 186 |
+
"dull",
|
| 187 |
+
"shiny",
|
| 188 |
+
"shiny",
|
| 189 |
+
"dull",
|
| 190 |
+
"shiny",
|
| 191 |
+
"shiny",
|
| 192 |
+
"shiny",
|
| 193 |
+
],
|
| 194 |
+
"D": np.random.default_rng(2).standard_normal(11),
|
| 195 |
+
"E": np.random.default_rng(2).standard_normal(11),
|
| 196 |
+
"F": np.random.default_rng(2).standard_normal(11),
|
| 197 |
+
}
|
| 198 |
+
)
|
| 199 |
+
|
| 200 |
+
data.loc[4, "C"] = np.nan
|
| 201 |
+
|
| 202 |
+
def transform(row):
|
| 203 |
+
if row["C"].startswith("shin") and row["A"] == "foo":
|
| 204 |
+
row["D"] = 7
|
| 205 |
+
return row
|
| 206 |
+
|
| 207 |
+
msg = "'float' object has no attribute 'startswith'"
|
| 208 |
+
with pytest.raises(AttributeError, match=msg):
|
| 209 |
+
data.apply(transform, axis=1)
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
@pytest.mark.parametrize(
|
| 213 |
+
"df, func, expected",
|
| 214 |
+
tm.get_cython_table_params(
|
| 215 |
+
DataFrame([["a", "b"], ["b", "a"]]), [["cumprod", TypeError]]
|
| 216 |
+
),
|
| 217 |
+
)
|
| 218 |
+
def test_agg_cython_table_raises_frame(df, func, expected, axis, using_infer_string):
|
| 219 |
+
# GH 21224
|
| 220 |
+
if using_infer_string:
|
| 221 |
+
import pyarrow as pa
|
| 222 |
+
|
| 223 |
+
expected = (expected, pa.lib.ArrowNotImplementedError)
|
| 224 |
+
|
| 225 |
+
msg = "can't multiply sequence by non-int of type 'str'|has no kernel"
|
| 226 |
+
warn = None if isinstance(func, str) else FutureWarning
|
| 227 |
+
with pytest.raises(expected, match=msg):
|
| 228 |
+
with tm.assert_produces_warning(warn, match="using DataFrame.cumprod"):
|
| 229 |
+
df.agg(func, axis=axis)
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
@pytest.mark.parametrize(
|
| 233 |
+
"series, func, expected",
|
| 234 |
+
chain(
|
| 235 |
+
tm.get_cython_table_params(
|
| 236 |
+
Series("a b c".split()),
|
| 237 |
+
[
|
| 238 |
+
("mean", TypeError), # mean raises TypeError
|
| 239 |
+
("prod", TypeError),
|
| 240 |
+
("std", TypeError),
|
| 241 |
+
("var", TypeError),
|
| 242 |
+
("median", TypeError),
|
| 243 |
+
("cumprod", TypeError),
|
| 244 |
+
],
|
| 245 |
+
)
|
| 246 |
+
),
|
| 247 |
+
)
|
| 248 |
+
def test_agg_cython_table_raises_series(series, func, expected, using_infer_string):
|
| 249 |
+
# GH21224
|
| 250 |
+
msg = r"[Cc]ould not convert|can't multiply sequence by non-int of type"
|
| 251 |
+
if func == "median" or func is np.nanmedian or func is np.median:
|
| 252 |
+
msg = r"Cannot convert \['a' 'b' 'c'\] to numeric"
|
| 253 |
+
|
| 254 |
+
if using_infer_string:
|
| 255 |
+
import pyarrow as pa
|
| 256 |
+
|
| 257 |
+
expected = (expected, pa.lib.ArrowNotImplementedError)
|
| 258 |
+
|
| 259 |
+
msg = msg + "|does not support|has no kernel"
|
| 260 |
+
warn = None if isinstance(func, str) else FutureWarning
|
| 261 |
+
|
| 262 |
+
with pytest.raises(expected, match=msg):
|
| 263 |
+
# e.g. Series('a b'.split()).cumprod() will raise
|
| 264 |
+
with tm.assert_produces_warning(warn, match="is currently using Series.*"):
|
| 265 |
+
series.agg(func)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def test_agg_none_to_type():
|
| 269 |
+
# GH 40543
|
| 270 |
+
df = DataFrame({"a": [None]})
|
| 271 |
+
msg = re.escape("int() argument must be a string")
|
| 272 |
+
with pytest.raises(TypeError, match=msg):
|
| 273 |
+
df.agg({"a": lambda x: int(x.iloc[0])})
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
def test_transform_none_to_type():
|
| 277 |
+
# GH#34377
|
| 278 |
+
df = DataFrame({"a": [None]})
|
| 279 |
+
msg = "argument must be a"
|
| 280 |
+
with pytest.raises(TypeError, match=msg):
|
| 281 |
+
df.transform({"a": lambda x: int(x.iloc[0])})
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
@pytest.mark.parametrize(
|
| 285 |
+
"func",
|
| 286 |
+
[
|
| 287 |
+
lambda x: np.array([1, 2]).reshape(-1, 2),
|
| 288 |
+
lambda x: [1, 2],
|
| 289 |
+
lambda x: Series([1, 2]),
|
| 290 |
+
],
|
| 291 |
+
)
|
| 292 |
+
def test_apply_broadcast_error(func):
|
| 293 |
+
df = DataFrame(
|
| 294 |
+
np.tile(np.arange(3, dtype="int64"), 6).reshape(6, -1) + 1,
|
| 295 |
+
columns=["A", "B", "C"],
|
| 296 |
+
)
|
| 297 |
+
|
| 298 |
+
# > 1 ndim
|
| 299 |
+
msg = "too many dims to broadcast|cannot broadcast result"
|
| 300 |
+
with pytest.raises(ValueError, match=msg):
|
| 301 |
+
df.apply(func, axis=1, result_type="broadcast")
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def test_transform_and_agg_err_agg(axis, float_frame):
|
| 305 |
+
# cannot both transform and agg
|
| 306 |
+
msg = "cannot combine transform and aggregation operations"
|
| 307 |
+
with pytest.raises(ValueError, match=msg):
|
| 308 |
+
with np.errstate(all="ignore"):
|
| 309 |
+
float_frame.agg(["max", "sqrt"], axis=axis)
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
@pytest.mark.filterwarnings("ignore::FutureWarning") # GH53325
|
| 313 |
+
@pytest.mark.parametrize(
|
| 314 |
+
"func, msg",
|
| 315 |
+
[
|
| 316 |
+
(["sqrt", "max"], "cannot combine transform and aggregation"),
|
| 317 |
+
(
|
| 318 |
+
{"foo": np.sqrt, "bar": "sum"},
|
| 319 |
+
"cannot perform both aggregation and transformation",
|
| 320 |
+
),
|
| 321 |
+
],
|
| 322 |
+
)
|
| 323 |
+
def test_transform_and_agg_err_series(string_series, func, msg):
|
| 324 |
+
# we are trying to transform with an aggregator
|
| 325 |
+
with pytest.raises(ValueError, match=msg):
|
| 326 |
+
with np.errstate(all="ignore"):
|
| 327 |
+
string_series.agg(func)
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
@pytest.mark.parametrize("func", [["max", "min"], ["max", "sqrt"]])
|
| 331 |
+
def test_transform_wont_agg_frame(axis, float_frame, func):
|
| 332 |
+
# GH 35964
|
| 333 |
+
# cannot both transform and agg
|
| 334 |
+
msg = "Function did not transform"
|
| 335 |
+
with pytest.raises(ValueError, match=msg):
|
| 336 |
+
float_frame.transform(func, axis=axis)
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
@pytest.mark.parametrize("func", [["min", "max"], ["sqrt", "max"]])
|
| 340 |
+
def test_transform_wont_agg_series(string_series, func):
|
| 341 |
+
# GH 35964
|
| 342 |
+
# we are trying to transform with an aggregator
|
| 343 |
+
msg = "Function did not transform"
|
| 344 |
+
|
| 345 |
+
with pytest.raises(ValueError, match=msg):
|
| 346 |
+
string_series.transform(func)
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
@pytest.mark.parametrize(
|
| 350 |
+
"op_wrapper", [lambda x: x, lambda x: [x], lambda x: {"A": x}, lambda x: {"A": [x]}]
|
| 351 |
+
)
|
| 352 |
+
def test_transform_reducer_raises(all_reductions, frame_or_series, op_wrapper):
|
| 353 |
+
# GH 35964
|
| 354 |
+
op = op_wrapper(all_reductions)
|
| 355 |
+
|
| 356 |
+
obj = DataFrame({"A": [1, 2, 3]})
|
| 357 |
+
obj = tm.get_obj(obj, frame_or_series)
|
| 358 |
+
|
| 359 |
+
msg = "Function did not transform"
|
| 360 |
+
with pytest.raises(ValueError, match=msg):
|
| 361 |
+
obj.transform(op)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_numba.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas.util._test_decorators as td
|
| 5 |
+
|
| 6 |
+
from pandas import (
|
| 7 |
+
DataFrame,
|
| 8 |
+
Index,
|
| 9 |
+
)
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
|
| 12 |
+
pytestmark = [td.skip_if_no("numba"), pytest.mark.single_cpu]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@pytest.fixture(params=[0, 1])
|
| 16 |
+
def apply_axis(request):
|
| 17 |
+
return request.param
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def test_numba_vs_python_noop(float_frame, apply_axis):
|
| 21 |
+
func = lambda x: x
|
| 22 |
+
result = float_frame.apply(func, engine="numba", axis=apply_axis)
|
| 23 |
+
expected = float_frame.apply(func, engine="python", axis=apply_axis)
|
| 24 |
+
tm.assert_frame_equal(result, expected)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def test_numba_vs_python_string_index():
|
| 28 |
+
# GH#56189
|
| 29 |
+
pytest.importorskip("pyarrow")
|
| 30 |
+
df = DataFrame(
|
| 31 |
+
1,
|
| 32 |
+
index=Index(["a", "b"], dtype="string[pyarrow_numpy]"),
|
| 33 |
+
columns=Index(["x", "y"], dtype="string[pyarrow_numpy]"),
|
| 34 |
+
)
|
| 35 |
+
func = lambda x: x
|
| 36 |
+
result = df.apply(func, engine="numba", axis=0)
|
| 37 |
+
expected = df.apply(func, engine="python", axis=0)
|
| 38 |
+
tm.assert_frame_equal(
|
| 39 |
+
result, expected, check_column_type=False, check_index_type=False
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def test_numba_vs_python_indexing():
|
| 44 |
+
frame = DataFrame(
|
| 45 |
+
{"a": [1, 2, 3], "b": [4, 5, 6], "c": [7.0, 8.0, 9.0]},
|
| 46 |
+
index=Index(["A", "B", "C"]),
|
| 47 |
+
)
|
| 48 |
+
row_func = lambda x: x["c"]
|
| 49 |
+
result = frame.apply(row_func, engine="numba", axis=1)
|
| 50 |
+
expected = frame.apply(row_func, engine="python", axis=1)
|
| 51 |
+
tm.assert_series_equal(result, expected)
|
| 52 |
+
|
| 53 |
+
col_func = lambda x: x["A"]
|
| 54 |
+
result = frame.apply(col_func, engine="numba", axis=0)
|
| 55 |
+
expected = frame.apply(col_func, engine="python", axis=0)
|
| 56 |
+
tm.assert_series_equal(result, expected)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
@pytest.mark.parametrize(
|
| 60 |
+
"reduction",
|
| 61 |
+
[lambda x: x.mean(), lambda x: x.min(), lambda x: x.max(), lambda x: x.sum()],
|
| 62 |
+
)
|
| 63 |
+
def test_numba_vs_python_reductions(reduction, apply_axis):
|
| 64 |
+
df = DataFrame(np.ones((4, 4), dtype=np.float64))
|
| 65 |
+
result = df.apply(reduction, engine="numba", axis=apply_axis)
|
| 66 |
+
expected = df.apply(reduction, engine="python", axis=apply_axis)
|
| 67 |
+
tm.assert_series_equal(result, expected)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
@pytest.mark.parametrize("colnames", [[1, 2, 3], [1.0, 2.0, 3.0]])
|
| 71 |
+
def test_numba_numeric_colnames(colnames):
|
| 72 |
+
# Check that numeric column names lower properly and can be indxed on
|
| 73 |
+
df = DataFrame(
|
| 74 |
+
np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int64), columns=colnames
|
| 75 |
+
)
|
| 76 |
+
first_col = colnames[0]
|
| 77 |
+
f = lambda x: x[first_col] # Get the first column
|
| 78 |
+
result = df.apply(f, engine="numba", axis=1)
|
| 79 |
+
expected = df.apply(f, engine="python", axis=1)
|
| 80 |
+
tm.assert_series_equal(result, expected)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def test_numba_parallel_unsupported(float_frame):
|
| 84 |
+
f = lambda x: x
|
| 85 |
+
with pytest.raises(
|
| 86 |
+
NotImplementedError,
|
| 87 |
+
match="Parallel apply is not supported when raw=False and engine='numba'",
|
| 88 |
+
):
|
| 89 |
+
float_frame.apply(f, engine="numba", engine_kwargs={"parallel": True})
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def test_numba_nonunique_unsupported(apply_axis):
|
| 93 |
+
f = lambda x: x
|
| 94 |
+
df = DataFrame({"a": [1, 2]}, index=Index(["a", "a"]))
|
| 95 |
+
with pytest.raises(
|
| 96 |
+
NotImplementedError,
|
| 97 |
+
match="The index/columns must be unique when raw=False and engine='numba'",
|
| 98 |
+
):
|
| 99 |
+
df.apply(f, engine="numba", axis=apply_axis)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def test_numba_unsupported_dtypes(apply_axis):
|
| 103 |
+
f = lambda x: x
|
| 104 |
+
df = DataFrame({"a": [1, 2], "b": ["a", "b"], "c": [4, 5]})
|
| 105 |
+
df["c"] = df["c"].astype("double[pyarrow]")
|
| 106 |
+
|
| 107 |
+
with pytest.raises(
|
| 108 |
+
ValueError,
|
| 109 |
+
match="Column b must have a numeric dtype. Found 'object|string' instead",
|
| 110 |
+
):
|
| 111 |
+
df.apply(f, engine="numba", axis=apply_axis)
|
| 112 |
+
|
| 113 |
+
with pytest.raises(
|
| 114 |
+
ValueError,
|
| 115 |
+
match="Column c is backed by an extension array, "
|
| 116 |
+
"which is not supported by the numba engine.",
|
| 117 |
+
):
|
| 118 |
+
df["c"].to_frame().apply(f, engine="numba", axis=apply_axis)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_series_apply_relabeling.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import pandas._testing as tm
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def test_relabel_no_duplicated_method():
|
| 6 |
+
# this is to test there is no duplicated method used in agg
|
| 7 |
+
df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4]})
|
| 8 |
+
|
| 9 |
+
result = df["A"].agg(foo="sum")
|
| 10 |
+
expected = df["A"].agg({"foo": "sum"})
|
| 11 |
+
tm.assert_series_equal(result, expected)
|
| 12 |
+
|
| 13 |
+
result = df["B"].agg(foo="min", bar="max")
|
| 14 |
+
expected = df["B"].agg({"foo": "min", "bar": "max"})
|
| 15 |
+
tm.assert_series_equal(result, expected)
|
| 16 |
+
|
| 17 |
+
msg = "using Series.[sum|min|max]"
|
| 18 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 19 |
+
result = df["B"].agg(foo=sum, bar=min, cat="max")
|
| 20 |
+
msg = "using Series.[sum|min|max]"
|
| 21 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 22 |
+
expected = df["B"].agg({"foo": sum, "bar": min, "cat": "max"})
|
| 23 |
+
tm.assert_series_equal(result, expected)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def test_relabel_duplicated_method():
|
| 27 |
+
# this is to test with nested renaming, duplicated method can be used
|
| 28 |
+
# if they are assigned with different new names
|
| 29 |
+
df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4]})
|
| 30 |
+
|
| 31 |
+
result = df["A"].agg(foo="sum", bar="sum")
|
| 32 |
+
expected = pd.Series([6, 6], index=["foo", "bar"], name="A")
|
| 33 |
+
tm.assert_series_equal(result, expected)
|
| 34 |
+
|
| 35 |
+
msg = "using Series.min"
|
| 36 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 37 |
+
result = df["B"].agg(foo=min, bar="min")
|
| 38 |
+
expected = pd.Series([1, 1], index=["foo", "bar"], name="B")
|
| 39 |
+
tm.assert_series_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_series_transform.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
from pandas import (
|
| 5 |
+
DataFrame,
|
| 6 |
+
MultiIndex,
|
| 7 |
+
Series,
|
| 8 |
+
concat,
|
| 9 |
+
)
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@pytest.mark.parametrize(
|
| 14 |
+
"args, kwargs, increment",
|
| 15 |
+
[((), {}, 0), ((), {"a": 1}, 1), ((2, 3), {}, 32), ((1,), {"c": 2}, 201)],
|
| 16 |
+
)
|
| 17 |
+
def test_agg_args(args, kwargs, increment):
|
| 18 |
+
# GH 43357
|
| 19 |
+
def f(x, a=0, b=0, c=0):
|
| 20 |
+
return x + a + 10 * b + 100 * c
|
| 21 |
+
|
| 22 |
+
s = Series([1, 2])
|
| 23 |
+
result = s.transform(f, 0, *args, **kwargs)
|
| 24 |
+
expected = s + increment
|
| 25 |
+
tm.assert_series_equal(result, expected)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
@pytest.mark.parametrize(
|
| 29 |
+
"ops, names",
|
| 30 |
+
[
|
| 31 |
+
([np.sqrt], ["sqrt"]),
|
| 32 |
+
([np.abs, np.sqrt], ["absolute", "sqrt"]),
|
| 33 |
+
(np.array([np.sqrt]), ["sqrt"]),
|
| 34 |
+
(np.array([np.abs, np.sqrt]), ["absolute", "sqrt"]),
|
| 35 |
+
],
|
| 36 |
+
)
|
| 37 |
+
def test_transform_listlike(string_series, ops, names):
|
| 38 |
+
# GH 35964
|
| 39 |
+
with np.errstate(all="ignore"):
|
| 40 |
+
expected = concat([op(string_series) for op in ops], axis=1)
|
| 41 |
+
expected.columns = names
|
| 42 |
+
result = string_series.transform(ops)
|
| 43 |
+
tm.assert_frame_equal(result, expected)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def test_transform_listlike_func_with_args():
|
| 47 |
+
# GH 50624
|
| 48 |
+
|
| 49 |
+
s = Series([1, 2, 3])
|
| 50 |
+
|
| 51 |
+
def foo1(x, a=1, c=0):
|
| 52 |
+
return x + a + c
|
| 53 |
+
|
| 54 |
+
def foo2(x, b=2, c=0):
|
| 55 |
+
return x + b + c
|
| 56 |
+
|
| 57 |
+
msg = r"foo1\(\) got an unexpected keyword argument 'b'"
|
| 58 |
+
with pytest.raises(TypeError, match=msg):
|
| 59 |
+
s.transform([foo1, foo2], 0, 3, b=3, c=4)
|
| 60 |
+
|
| 61 |
+
result = s.transform([foo1, foo2], 0, 3, c=4)
|
| 62 |
+
expected = DataFrame({"foo1": [8, 9, 10], "foo2": [8, 9, 10]})
|
| 63 |
+
tm.assert_frame_equal(result, expected)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@pytest.mark.parametrize("box", [dict, Series])
|
| 67 |
+
def test_transform_dictlike(string_series, box):
|
| 68 |
+
# GH 35964
|
| 69 |
+
with np.errstate(all="ignore"):
|
| 70 |
+
expected = concat([np.sqrt(string_series), np.abs(string_series)], axis=1)
|
| 71 |
+
expected.columns = ["foo", "bar"]
|
| 72 |
+
result = string_series.transform(box({"foo": np.sqrt, "bar": np.abs}))
|
| 73 |
+
tm.assert_frame_equal(result, expected)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def test_transform_dictlike_mixed():
|
| 77 |
+
# GH 40018 - mix of lists and non-lists in values of a dictionary
|
| 78 |
+
df = Series([1, 4])
|
| 79 |
+
result = df.transform({"b": ["sqrt", "abs"], "c": "sqrt"})
|
| 80 |
+
expected = DataFrame(
|
| 81 |
+
[[1.0, 1, 1.0], [2.0, 4, 2.0]],
|
| 82 |
+
columns=MultiIndex([("b", "c"), ("sqrt", "abs")], [(0, 0, 1), (0, 1, 0)]),
|
| 83 |
+
)
|
| 84 |
+
tm.assert_frame_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/apply/test_str.py
ADDED
|
@@ -0,0 +1,326 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import chain
|
| 2 |
+
import operator
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
from pandas.core.dtypes.common import is_number
|
| 8 |
+
|
| 9 |
+
from pandas import (
|
| 10 |
+
DataFrame,
|
| 11 |
+
Series,
|
| 12 |
+
)
|
| 13 |
+
import pandas._testing as tm
|
| 14 |
+
from pandas.tests.apply.common import (
|
| 15 |
+
frame_transform_kernels,
|
| 16 |
+
series_transform_kernels,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@pytest.mark.parametrize("func", ["sum", "mean", "min", "max", "std"])
|
| 21 |
+
@pytest.mark.parametrize(
|
| 22 |
+
"args,kwds",
|
| 23 |
+
[
|
| 24 |
+
pytest.param([], {}, id="no_args_or_kwds"),
|
| 25 |
+
pytest.param([1], {}, id="axis_from_args"),
|
| 26 |
+
pytest.param([], {"axis": 1}, id="axis_from_kwds"),
|
| 27 |
+
pytest.param([], {"numeric_only": True}, id="optional_kwds"),
|
| 28 |
+
pytest.param([1, True], {"numeric_only": True}, id="args_and_kwds"),
|
| 29 |
+
],
|
| 30 |
+
)
|
| 31 |
+
@pytest.mark.parametrize("how", ["agg", "apply"])
|
| 32 |
+
def test_apply_with_string_funcs(request, float_frame, func, args, kwds, how):
|
| 33 |
+
if len(args) > 1 and how == "agg":
|
| 34 |
+
request.applymarker(
|
| 35 |
+
pytest.mark.xfail(
|
| 36 |
+
raises=TypeError,
|
| 37 |
+
reason="agg/apply signature mismatch - agg passes 2nd "
|
| 38 |
+
"argument to func",
|
| 39 |
+
)
|
| 40 |
+
)
|
| 41 |
+
result = getattr(float_frame, how)(func, *args, **kwds)
|
| 42 |
+
expected = getattr(float_frame, func)(*args, **kwds)
|
| 43 |
+
tm.assert_series_equal(result, expected)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
@pytest.mark.parametrize("arg", ["sum", "mean", "min", "max", "std"])
|
| 47 |
+
def test_with_string_args(datetime_series, arg):
|
| 48 |
+
result = datetime_series.apply(arg)
|
| 49 |
+
expected = getattr(datetime_series, arg)()
|
| 50 |
+
assert result == expected
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
@pytest.mark.parametrize("op", ["mean", "median", "std", "var"])
|
| 54 |
+
@pytest.mark.parametrize("how", ["agg", "apply"])
|
| 55 |
+
def test_apply_np_reducer(op, how):
|
| 56 |
+
# GH 39116
|
| 57 |
+
float_frame = DataFrame({"a": [1, 2], "b": [3, 4]})
|
| 58 |
+
result = getattr(float_frame, how)(op)
|
| 59 |
+
# pandas ddof defaults to 1, numpy to 0
|
| 60 |
+
kwargs = {"ddof": 1} if op in ("std", "var") else {}
|
| 61 |
+
expected = Series(
|
| 62 |
+
getattr(np, op)(float_frame, axis=0, **kwargs), index=float_frame.columns
|
| 63 |
+
)
|
| 64 |
+
tm.assert_series_equal(result, expected)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
@pytest.mark.parametrize(
|
| 68 |
+
"op", ["abs", "ceil", "cos", "cumsum", "exp", "log", "sqrt", "square"]
|
| 69 |
+
)
|
| 70 |
+
@pytest.mark.parametrize("how", ["transform", "apply"])
|
| 71 |
+
def test_apply_np_transformer(float_frame, op, how):
|
| 72 |
+
# GH 39116
|
| 73 |
+
|
| 74 |
+
# float_frame will _usually_ have negative values, which will
|
| 75 |
+
# trigger the warning here, but let's put one in just to be sure
|
| 76 |
+
float_frame.iloc[0, 0] = -1.0
|
| 77 |
+
warn = None
|
| 78 |
+
if op in ["log", "sqrt"]:
|
| 79 |
+
warn = RuntimeWarning
|
| 80 |
+
|
| 81 |
+
with tm.assert_produces_warning(warn, check_stacklevel=False):
|
| 82 |
+
# float_frame fixture is defined in conftest.py, so we don't check the
|
| 83 |
+
# stacklevel as otherwise the test would fail.
|
| 84 |
+
result = getattr(float_frame, how)(op)
|
| 85 |
+
expected = getattr(np, op)(float_frame)
|
| 86 |
+
tm.assert_frame_equal(result, expected)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
@pytest.mark.parametrize(
|
| 90 |
+
"series, func, expected",
|
| 91 |
+
chain(
|
| 92 |
+
tm.get_cython_table_params(
|
| 93 |
+
Series(dtype=np.float64),
|
| 94 |
+
[
|
| 95 |
+
("sum", 0),
|
| 96 |
+
("max", np.nan),
|
| 97 |
+
("min", np.nan),
|
| 98 |
+
("all", True),
|
| 99 |
+
("any", False),
|
| 100 |
+
("mean", np.nan),
|
| 101 |
+
("prod", 1),
|
| 102 |
+
("std", np.nan),
|
| 103 |
+
("var", np.nan),
|
| 104 |
+
("median", np.nan),
|
| 105 |
+
],
|
| 106 |
+
),
|
| 107 |
+
tm.get_cython_table_params(
|
| 108 |
+
Series([np.nan, 1, 2, 3]),
|
| 109 |
+
[
|
| 110 |
+
("sum", 6),
|
| 111 |
+
("max", 3),
|
| 112 |
+
("min", 1),
|
| 113 |
+
("all", True),
|
| 114 |
+
("any", True),
|
| 115 |
+
("mean", 2),
|
| 116 |
+
("prod", 6),
|
| 117 |
+
("std", 1),
|
| 118 |
+
("var", 1),
|
| 119 |
+
("median", 2),
|
| 120 |
+
],
|
| 121 |
+
),
|
| 122 |
+
tm.get_cython_table_params(
|
| 123 |
+
Series("a b c".split()),
|
| 124 |
+
[
|
| 125 |
+
("sum", "abc"),
|
| 126 |
+
("max", "c"),
|
| 127 |
+
("min", "a"),
|
| 128 |
+
("all", True),
|
| 129 |
+
("any", True),
|
| 130 |
+
],
|
| 131 |
+
),
|
| 132 |
+
),
|
| 133 |
+
)
|
| 134 |
+
def test_agg_cython_table_series(series, func, expected):
|
| 135 |
+
# GH21224
|
| 136 |
+
# test reducing functions in
|
| 137 |
+
# pandas.core.base.SelectionMixin._cython_table
|
| 138 |
+
warn = None if isinstance(func, str) else FutureWarning
|
| 139 |
+
with tm.assert_produces_warning(warn, match="is currently using Series.*"):
|
| 140 |
+
result = series.agg(func)
|
| 141 |
+
if is_number(expected):
|
| 142 |
+
assert np.isclose(result, expected, equal_nan=True)
|
| 143 |
+
else:
|
| 144 |
+
assert result == expected
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
@pytest.mark.parametrize(
|
| 148 |
+
"series, func, expected",
|
| 149 |
+
chain(
|
| 150 |
+
tm.get_cython_table_params(
|
| 151 |
+
Series(dtype=np.float64),
|
| 152 |
+
[
|
| 153 |
+
("cumprod", Series([], dtype=np.float64)),
|
| 154 |
+
("cumsum", Series([], dtype=np.float64)),
|
| 155 |
+
],
|
| 156 |
+
),
|
| 157 |
+
tm.get_cython_table_params(
|
| 158 |
+
Series([np.nan, 1, 2, 3]),
|
| 159 |
+
[
|
| 160 |
+
("cumprod", Series([np.nan, 1, 2, 6])),
|
| 161 |
+
("cumsum", Series([np.nan, 1, 3, 6])),
|
| 162 |
+
],
|
| 163 |
+
),
|
| 164 |
+
tm.get_cython_table_params(
|
| 165 |
+
Series("a b c".split()), [("cumsum", Series(["a", "ab", "abc"]))]
|
| 166 |
+
),
|
| 167 |
+
),
|
| 168 |
+
)
|
| 169 |
+
def test_agg_cython_table_transform_series(series, func, expected):
|
| 170 |
+
# GH21224
|
| 171 |
+
# test transforming functions in
|
| 172 |
+
# pandas.core.base.SelectionMixin._cython_table (cumprod, cumsum)
|
| 173 |
+
warn = None if isinstance(func, str) else FutureWarning
|
| 174 |
+
with tm.assert_produces_warning(warn, match="is currently using Series.*"):
|
| 175 |
+
result = series.agg(func)
|
| 176 |
+
tm.assert_series_equal(result, expected)
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
@pytest.mark.parametrize(
|
| 180 |
+
"df, func, expected",
|
| 181 |
+
chain(
|
| 182 |
+
tm.get_cython_table_params(
|
| 183 |
+
DataFrame(),
|
| 184 |
+
[
|
| 185 |
+
("sum", Series(dtype="float64")),
|
| 186 |
+
("max", Series(dtype="float64")),
|
| 187 |
+
("min", Series(dtype="float64")),
|
| 188 |
+
("all", Series(dtype=bool)),
|
| 189 |
+
("any", Series(dtype=bool)),
|
| 190 |
+
("mean", Series(dtype="float64")),
|
| 191 |
+
("prod", Series(dtype="float64")),
|
| 192 |
+
("std", Series(dtype="float64")),
|
| 193 |
+
("var", Series(dtype="float64")),
|
| 194 |
+
("median", Series(dtype="float64")),
|
| 195 |
+
],
|
| 196 |
+
),
|
| 197 |
+
tm.get_cython_table_params(
|
| 198 |
+
DataFrame([[np.nan, 1], [1, 2]]),
|
| 199 |
+
[
|
| 200 |
+
("sum", Series([1.0, 3])),
|
| 201 |
+
("max", Series([1.0, 2])),
|
| 202 |
+
("min", Series([1.0, 1])),
|
| 203 |
+
("all", Series([True, True])),
|
| 204 |
+
("any", Series([True, True])),
|
| 205 |
+
("mean", Series([1, 1.5])),
|
| 206 |
+
("prod", Series([1.0, 2])),
|
| 207 |
+
("std", Series([np.nan, 0.707107])),
|
| 208 |
+
("var", Series([np.nan, 0.5])),
|
| 209 |
+
("median", Series([1, 1.5])),
|
| 210 |
+
],
|
| 211 |
+
),
|
| 212 |
+
),
|
| 213 |
+
)
|
| 214 |
+
def test_agg_cython_table_frame(df, func, expected, axis):
|
| 215 |
+
# GH 21224
|
| 216 |
+
# test reducing functions in
|
| 217 |
+
# pandas.core.base.SelectionMixin._cython_table
|
| 218 |
+
warn = None if isinstance(func, str) else FutureWarning
|
| 219 |
+
with tm.assert_produces_warning(warn, match="is currently using DataFrame.*"):
|
| 220 |
+
# GH#53425
|
| 221 |
+
result = df.agg(func, axis=axis)
|
| 222 |
+
tm.assert_series_equal(result, expected)
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
@pytest.mark.parametrize(
|
| 226 |
+
"df, func, expected",
|
| 227 |
+
chain(
|
| 228 |
+
tm.get_cython_table_params(
|
| 229 |
+
DataFrame(), [("cumprod", DataFrame()), ("cumsum", DataFrame())]
|
| 230 |
+
),
|
| 231 |
+
tm.get_cython_table_params(
|
| 232 |
+
DataFrame([[np.nan, 1], [1, 2]]),
|
| 233 |
+
[
|
| 234 |
+
("cumprod", DataFrame([[np.nan, 1], [1, 2]])),
|
| 235 |
+
("cumsum", DataFrame([[np.nan, 1], [1, 3]])),
|
| 236 |
+
],
|
| 237 |
+
),
|
| 238 |
+
),
|
| 239 |
+
)
|
| 240 |
+
def test_agg_cython_table_transform_frame(df, func, expected, axis):
|
| 241 |
+
# GH 21224
|
| 242 |
+
# test transforming functions in
|
| 243 |
+
# pandas.core.base.SelectionMixin._cython_table (cumprod, cumsum)
|
| 244 |
+
if axis in ("columns", 1):
|
| 245 |
+
# operating blockwise doesn't let us preserve dtypes
|
| 246 |
+
expected = expected.astype("float64")
|
| 247 |
+
|
| 248 |
+
warn = None if isinstance(func, str) else FutureWarning
|
| 249 |
+
with tm.assert_produces_warning(warn, match="is currently using DataFrame.*"):
|
| 250 |
+
# GH#53425
|
| 251 |
+
result = df.agg(func, axis=axis)
|
| 252 |
+
tm.assert_frame_equal(result, expected)
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
@pytest.mark.parametrize("op", series_transform_kernels)
|
| 256 |
+
def test_transform_groupby_kernel_series(request, string_series, op):
|
| 257 |
+
# GH 35964
|
| 258 |
+
if op == "ngroup":
|
| 259 |
+
request.applymarker(
|
| 260 |
+
pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
|
| 261 |
+
)
|
| 262 |
+
args = [0.0] if op == "fillna" else []
|
| 263 |
+
ones = np.ones(string_series.shape[0])
|
| 264 |
+
|
| 265 |
+
warn = FutureWarning if op == "fillna" else None
|
| 266 |
+
msg = "SeriesGroupBy.fillna is deprecated"
|
| 267 |
+
with tm.assert_produces_warning(warn, match=msg):
|
| 268 |
+
expected = string_series.groupby(ones).transform(op, *args)
|
| 269 |
+
result = string_series.transform(op, 0, *args)
|
| 270 |
+
tm.assert_series_equal(result, expected)
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
@pytest.mark.parametrize("op", frame_transform_kernels)
|
| 274 |
+
def test_transform_groupby_kernel_frame(request, axis, float_frame, op):
|
| 275 |
+
if op == "ngroup":
|
| 276 |
+
request.applymarker(
|
| 277 |
+
pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
|
| 278 |
+
)
|
| 279 |
+
|
| 280 |
+
# GH 35964
|
| 281 |
+
|
| 282 |
+
args = [0.0] if op == "fillna" else []
|
| 283 |
+
if axis in (0, "index"):
|
| 284 |
+
ones = np.ones(float_frame.shape[0])
|
| 285 |
+
msg = "The 'axis' keyword in DataFrame.groupby is deprecated"
|
| 286 |
+
else:
|
| 287 |
+
ones = np.ones(float_frame.shape[1])
|
| 288 |
+
msg = "DataFrame.groupby with axis=1 is deprecated"
|
| 289 |
+
|
| 290 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 291 |
+
gb = float_frame.groupby(ones, axis=axis)
|
| 292 |
+
|
| 293 |
+
warn = FutureWarning if op == "fillna" else None
|
| 294 |
+
op_msg = "DataFrameGroupBy.fillna is deprecated"
|
| 295 |
+
with tm.assert_produces_warning(warn, match=op_msg):
|
| 296 |
+
expected = gb.transform(op, *args)
|
| 297 |
+
|
| 298 |
+
result = float_frame.transform(op, axis, *args)
|
| 299 |
+
tm.assert_frame_equal(result, expected)
|
| 300 |
+
|
| 301 |
+
# same thing, but ensuring we have multiple blocks
|
| 302 |
+
assert "E" not in float_frame.columns
|
| 303 |
+
float_frame["E"] = float_frame["A"].copy()
|
| 304 |
+
assert len(float_frame._mgr.arrays) > 1
|
| 305 |
+
|
| 306 |
+
if axis in (0, "index"):
|
| 307 |
+
ones = np.ones(float_frame.shape[0])
|
| 308 |
+
else:
|
| 309 |
+
ones = np.ones(float_frame.shape[1])
|
| 310 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
| 311 |
+
gb2 = float_frame.groupby(ones, axis=axis)
|
| 312 |
+
warn = FutureWarning if op == "fillna" else None
|
| 313 |
+
op_msg = "DataFrameGroupBy.fillna is deprecated"
|
| 314 |
+
with tm.assert_produces_warning(warn, match=op_msg):
|
| 315 |
+
expected2 = gb2.transform(op, *args)
|
| 316 |
+
result2 = float_frame.transform(op, axis, *args)
|
| 317 |
+
tm.assert_frame_equal(result2, expected2)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
@pytest.mark.parametrize("method", ["abs", "shift", "pct_change", "cumsum", "rank"])
|
| 321 |
+
def test_transform_method_name(method):
|
| 322 |
+
# GH 19760
|
| 323 |
+
df = DataFrame({"A": [-1, 2]})
|
| 324 |
+
result = df.transform(method)
|
| 325 |
+
expected = operator.methodcaller(method)(df)
|
| 326 |
+
tm.assert_frame_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__init__.py
ADDED
|
File without changes
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/common.cpython-310.pyc
ADDED
|
Binary file (3.81 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_categorical.cpython-310.pyc
ADDED
|
Binary file (1.18 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_datetime64.cpython-310.pyc
ADDED
|
Binary file (59 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_interval.cpython-310.pyc
ADDED
|
Binary file (8.77 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_numeric.cpython-310.pyc
ADDED
|
Binary file (44.5 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_object.cpython-310.pyc
ADDED
|
Binary file (11.8 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_period.cpython-310.pyc
ADDED
|
Binary file (46.5 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_timedelta64.cpython-310.pyc
ADDED
|
Binary file (54.8 kB). View file
|
|
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/common.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Assertion helpers for arithmetic tests.
|
| 3 |
+
"""
|
| 4 |
+
import numpy as np
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
from pandas import (
|
| 8 |
+
DataFrame,
|
| 9 |
+
Index,
|
| 10 |
+
Series,
|
| 11 |
+
array,
|
| 12 |
+
)
|
| 13 |
+
import pandas._testing as tm
|
| 14 |
+
from pandas.core.arrays import (
|
| 15 |
+
BooleanArray,
|
| 16 |
+
NumpyExtensionArray,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def assert_cannot_add(left, right, msg="cannot add"):
|
| 21 |
+
"""
|
| 22 |
+
Helper to assert that left and right cannot be added.
|
| 23 |
+
|
| 24 |
+
Parameters
|
| 25 |
+
----------
|
| 26 |
+
left : object
|
| 27 |
+
right : object
|
| 28 |
+
msg : str, default "cannot add"
|
| 29 |
+
"""
|
| 30 |
+
with pytest.raises(TypeError, match=msg):
|
| 31 |
+
left + right
|
| 32 |
+
with pytest.raises(TypeError, match=msg):
|
| 33 |
+
right + left
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def assert_invalid_addsub_type(left, right, msg=None):
|
| 37 |
+
"""
|
| 38 |
+
Helper to assert that left and right can be neither added nor subtracted.
|
| 39 |
+
|
| 40 |
+
Parameters
|
| 41 |
+
----------
|
| 42 |
+
left : object
|
| 43 |
+
right : object
|
| 44 |
+
msg : str or None, default None
|
| 45 |
+
"""
|
| 46 |
+
with pytest.raises(TypeError, match=msg):
|
| 47 |
+
left + right
|
| 48 |
+
with pytest.raises(TypeError, match=msg):
|
| 49 |
+
right + left
|
| 50 |
+
with pytest.raises(TypeError, match=msg):
|
| 51 |
+
left - right
|
| 52 |
+
with pytest.raises(TypeError, match=msg):
|
| 53 |
+
right - left
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def get_upcast_box(left, right, is_cmp: bool = False):
|
| 57 |
+
"""
|
| 58 |
+
Get the box to use for 'expected' in an arithmetic or comparison operation.
|
| 59 |
+
|
| 60 |
+
Parameters
|
| 61 |
+
left : Any
|
| 62 |
+
right : Any
|
| 63 |
+
is_cmp : bool, default False
|
| 64 |
+
Whether the operation is a comparison method.
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
if isinstance(left, DataFrame) or isinstance(right, DataFrame):
|
| 68 |
+
return DataFrame
|
| 69 |
+
if isinstance(left, Series) or isinstance(right, Series):
|
| 70 |
+
if is_cmp and isinstance(left, Index):
|
| 71 |
+
# Index does not defer for comparisons
|
| 72 |
+
return np.array
|
| 73 |
+
return Series
|
| 74 |
+
if isinstance(left, Index) or isinstance(right, Index):
|
| 75 |
+
if is_cmp:
|
| 76 |
+
return np.array
|
| 77 |
+
return Index
|
| 78 |
+
return tm.to_array
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def assert_invalid_comparison(left, right, box):
|
| 82 |
+
"""
|
| 83 |
+
Assert that comparison operations with mismatched types behave correctly.
|
| 84 |
+
|
| 85 |
+
Parameters
|
| 86 |
+
----------
|
| 87 |
+
left : np.ndarray, ExtensionArray, Index, or Series
|
| 88 |
+
right : object
|
| 89 |
+
box : {pd.DataFrame, pd.Series, pd.Index, pd.array, tm.to_array}
|
| 90 |
+
"""
|
| 91 |
+
# Not for tznaive-tzaware comparison
|
| 92 |
+
|
| 93 |
+
# Note: not quite the same as how we do this for tm.box_expected
|
| 94 |
+
xbox = box if box not in [Index, array] else np.array
|
| 95 |
+
|
| 96 |
+
def xbox2(x):
|
| 97 |
+
# Eventually we'd like this to be tighter, but for now we'll
|
| 98 |
+
# just exclude NumpyExtensionArray[bool]
|
| 99 |
+
if isinstance(x, NumpyExtensionArray):
|
| 100 |
+
return x._ndarray
|
| 101 |
+
if isinstance(x, BooleanArray):
|
| 102 |
+
# NB: we are assuming no pd.NAs for now
|
| 103 |
+
return x.astype(bool)
|
| 104 |
+
return x
|
| 105 |
+
|
| 106 |
+
# rev_box: box to use for reversed comparisons
|
| 107 |
+
rev_box = xbox
|
| 108 |
+
if isinstance(right, Index) and isinstance(left, Series):
|
| 109 |
+
rev_box = np.array
|
| 110 |
+
|
| 111 |
+
result = xbox2(left == right)
|
| 112 |
+
expected = xbox(np.zeros(result.shape, dtype=np.bool_))
|
| 113 |
+
|
| 114 |
+
tm.assert_equal(result, expected)
|
| 115 |
+
|
| 116 |
+
result = xbox2(right == left)
|
| 117 |
+
tm.assert_equal(result, rev_box(expected))
|
| 118 |
+
|
| 119 |
+
result = xbox2(left != right)
|
| 120 |
+
tm.assert_equal(result, ~expected)
|
| 121 |
+
|
| 122 |
+
result = xbox2(right != left)
|
| 123 |
+
tm.assert_equal(result, rev_box(~expected))
|
| 124 |
+
|
| 125 |
+
msg = "|".join(
|
| 126 |
+
[
|
| 127 |
+
"Invalid comparison between",
|
| 128 |
+
"Cannot compare type",
|
| 129 |
+
"not supported between",
|
| 130 |
+
"invalid type promotion",
|
| 131 |
+
(
|
| 132 |
+
# GH#36706 npdev 1.20.0 2020-09-28
|
| 133 |
+
r"The DTypes <class 'numpy.dtype\[datetime64\]'> and "
|
| 134 |
+
r"<class 'numpy.dtype\[int64\]'> do not have a common DType. "
|
| 135 |
+
"For example they cannot be stored in a single array unless the "
|
| 136 |
+
"dtype is `object`."
|
| 137 |
+
),
|
| 138 |
+
]
|
| 139 |
+
)
|
| 140 |
+
with pytest.raises(TypeError, match=msg):
|
| 141 |
+
left < right
|
| 142 |
+
with pytest.raises(TypeError, match=msg):
|
| 143 |
+
left <= right
|
| 144 |
+
with pytest.raises(TypeError, match=msg):
|
| 145 |
+
left > right
|
| 146 |
+
with pytest.raises(TypeError, match=msg):
|
| 147 |
+
left >= right
|
| 148 |
+
with pytest.raises(TypeError, match=msg):
|
| 149 |
+
right < left
|
| 150 |
+
with pytest.raises(TypeError, match=msg):
|
| 151 |
+
right <= left
|
| 152 |
+
with pytest.raises(TypeError, match=msg):
|
| 153 |
+
right > left
|
| 154 |
+
with pytest.raises(TypeError, match=msg):
|
| 155 |
+
right >= left
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/conftest.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pandas import Index
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@pytest.fixture(params=[1, np.array(1, dtype=np.int64)])
|
| 9 |
+
def one(request):
|
| 10 |
+
"""
|
| 11 |
+
Several variants of integer value 1. The zero-dim integer array
|
| 12 |
+
behaves like an integer.
|
| 13 |
+
|
| 14 |
+
This fixture can be used to check that datetimelike indexes handle
|
| 15 |
+
addition and subtraction of integers and zero-dimensional arrays
|
| 16 |
+
of integers.
|
| 17 |
+
|
| 18 |
+
Examples
|
| 19 |
+
--------
|
| 20 |
+
dti = pd.date_range('2016-01-01', periods=2, freq='h')
|
| 21 |
+
dti
|
| 22 |
+
DatetimeIndex(['2016-01-01 00:00:00', '2016-01-01 01:00:00'],
|
| 23 |
+
dtype='datetime64[ns]', freq='h')
|
| 24 |
+
dti + one
|
| 25 |
+
DatetimeIndex(['2016-01-01 01:00:00', '2016-01-01 02:00:00'],
|
| 26 |
+
dtype='datetime64[ns]', freq='h')
|
| 27 |
+
"""
|
| 28 |
+
return request.param
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
zeros = [
|
| 32 |
+
box_cls([0] * 5, dtype=dtype)
|
| 33 |
+
for box_cls in [Index, np.array, pd.array]
|
| 34 |
+
for dtype in [np.int64, np.uint64, np.float64]
|
| 35 |
+
]
|
| 36 |
+
zeros.extend([box_cls([-0.0] * 5, dtype=np.float64) for box_cls in [Index, np.array]])
|
| 37 |
+
zeros.extend([np.array(0, dtype=dtype) for dtype in [np.int64, np.uint64, np.float64]])
|
| 38 |
+
zeros.extend([np.array(-0.0, dtype=np.float64)])
|
| 39 |
+
zeros.extend([0, 0.0, -0.0])
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
@pytest.fixture(params=zeros)
|
| 43 |
+
def zero(request):
|
| 44 |
+
"""
|
| 45 |
+
Several types of scalar zeros and length 5 vectors of zeros.
|
| 46 |
+
|
| 47 |
+
This fixture can be used to check that numeric-dtype indexes handle
|
| 48 |
+
division by any zero numeric-dtype.
|
| 49 |
+
|
| 50 |
+
Uses vector of length 5 for broadcasting with `numeric_idx` fixture,
|
| 51 |
+
which creates numeric-dtype vectors also of length 5.
|
| 52 |
+
|
| 53 |
+
Examples
|
| 54 |
+
--------
|
| 55 |
+
arr = RangeIndex(5)
|
| 56 |
+
arr / zeros
|
| 57 |
+
Index([nan, inf, inf, inf, inf], dtype='float64')
|
| 58 |
+
"""
|
| 59 |
+
return request.param
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# ------------------------------------------------------------------
|
| 63 |
+
# Scalar Fixtures
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@pytest.fixture(
|
| 67 |
+
params=[
|
| 68 |
+
pd.Timedelta("10m7s").to_pytimedelta(),
|
| 69 |
+
pd.Timedelta("10m7s"),
|
| 70 |
+
pd.Timedelta("10m7s").to_timedelta64(),
|
| 71 |
+
],
|
| 72 |
+
ids=lambda x: type(x).__name__,
|
| 73 |
+
)
|
| 74 |
+
def scalar_td(request):
|
| 75 |
+
"""
|
| 76 |
+
Several variants of Timedelta scalars representing 10 minutes and 7 seconds.
|
| 77 |
+
"""
|
| 78 |
+
return request.param
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@pytest.fixture(
|
| 82 |
+
params=[
|
| 83 |
+
pd.offsets.Day(3),
|
| 84 |
+
pd.offsets.Hour(72),
|
| 85 |
+
pd.Timedelta(days=3).to_pytimedelta(),
|
| 86 |
+
pd.Timedelta("72:00:00"),
|
| 87 |
+
np.timedelta64(3, "D"),
|
| 88 |
+
np.timedelta64(72, "h"),
|
| 89 |
+
],
|
| 90 |
+
ids=lambda x: type(x).__name__,
|
| 91 |
+
)
|
| 92 |
+
def three_days(request):
|
| 93 |
+
"""
|
| 94 |
+
Several timedelta-like and DateOffset objects that each represent
|
| 95 |
+
a 3-day timedelta
|
| 96 |
+
"""
|
| 97 |
+
return request.param
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
@pytest.fixture(
|
| 101 |
+
params=[
|
| 102 |
+
pd.offsets.Hour(2),
|
| 103 |
+
pd.offsets.Minute(120),
|
| 104 |
+
pd.Timedelta(hours=2).to_pytimedelta(),
|
| 105 |
+
pd.Timedelta(seconds=2 * 3600),
|
| 106 |
+
np.timedelta64(2, "h"),
|
| 107 |
+
np.timedelta64(120, "m"),
|
| 108 |
+
],
|
| 109 |
+
ids=lambda x: type(x).__name__,
|
| 110 |
+
)
|
| 111 |
+
def two_hours(request):
|
| 112 |
+
"""
|
| 113 |
+
Several timedelta-like and DateOffset objects that each represent
|
| 114 |
+
a 2-hour timedelta
|
| 115 |
+
"""
|
| 116 |
+
return request.param
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
_common_mismatch = [
|
| 120 |
+
pd.offsets.YearBegin(2),
|
| 121 |
+
pd.offsets.MonthBegin(1),
|
| 122 |
+
pd.offsets.Minute(),
|
| 123 |
+
]
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
@pytest.fixture(
|
| 127 |
+
params=[
|
| 128 |
+
np.timedelta64(4, "h"),
|
| 129 |
+
pd.Timedelta(hours=23).to_pytimedelta(),
|
| 130 |
+
pd.Timedelta("23:00:00"),
|
| 131 |
+
]
|
| 132 |
+
+ _common_mismatch
|
| 133 |
+
)
|
| 134 |
+
def not_daily(request):
|
| 135 |
+
"""
|
| 136 |
+
Several timedelta-like and DateOffset instances that are _not_
|
| 137 |
+
compatible with Daily frequencies.
|
| 138 |
+
"""
|
| 139 |
+
return request.param
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_array_ops.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import operator
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
import pandas._testing as tm
|
| 7 |
+
from pandas.core.ops.array_ops import (
|
| 8 |
+
comparison_op,
|
| 9 |
+
na_logical_op,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def test_na_logical_op_2d():
|
| 14 |
+
left = np.arange(8).reshape(4, 2)
|
| 15 |
+
right = left.astype(object)
|
| 16 |
+
right[0, 0] = np.nan
|
| 17 |
+
|
| 18 |
+
# Check that we fall back to the vec_binop branch
|
| 19 |
+
with pytest.raises(TypeError, match="unsupported operand type"):
|
| 20 |
+
operator.or_(left, right)
|
| 21 |
+
|
| 22 |
+
result = na_logical_op(left, right, operator.or_)
|
| 23 |
+
expected = right
|
| 24 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def test_object_comparison_2d():
|
| 28 |
+
left = np.arange(9).reshape(3, 3).astype(object)
|
| 29 |
+
right = left.T
|
| 30 |
+
|
| 31 |
+
result = comparison_op(left, right, operator.eq)
|
| 32 |
+
expected = np.eye(3).astype(bool)
|
| 33 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 34 |
+
|
| 35 |
+
# Ensure that cython doesn't raise on non-writeable arg, which
|
| 36 |
+
# we can get from np.broadcast_to
|
| 37 |
+
right.flags.writeable = False
|
| 38 |
+
result = comparison_op(left, right, operator.ne)
|
| 39 |
+
tm.assert_numpy_array_equal(result, ~expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_categorical.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
|
| 3 |
+
from pandas import (
|
| 4 |
+
Categorical,
|
| 5 |
+
Series,
|
| 6 |
+
)
|
| 7 |
+
import pandas._testing as tm
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestCategoricalComparisons:
|
| 11 |
+
def test_categorical_nan_equality(self):
|
| 12 |
+
cat = Series(Categorical(["a", "b", "c", np.nan]))
|
| 13 |
+
expected = Series([True, True, True, False])
|
| 14 |
+
result = cat == cat
|
| 15 |
+
tm.assert_series_equal(result, expected)
|
| 16 |
+
|
| 17 |
+
def test_categorical_tuple_equality(self):
|
| 18 |
+
# GH 18050
|
| 19 |
+
ser = Series([(0, 0), (0, 1), (0, 0), (1, 0), (1, 1)])
|
| 20 |
+
expected = Series([True, False, True, False, False])
|
| 21 |
+
result = ser == (0, 0)
|
| 22 |
+
tm.assert_series_equal(result, expected)
|
| 23 |
+
|
| 24 |
+
result = ser.astype("category") == (0, 0)
|
| 25 |
+
tm.assert_series_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_datetime64.py
ADDED
|
@@ -0,0 +1,2469 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Arithmetic tests for DataFrame/Series/Index/Array classes that should
|
| 2 |
+
# behave identically.
|
| 3 |
+
# Specifically for datetime64 and datetime64tz dtypes
|
| 4 |
+
from datetime import (
|
| 5 |
+
datetime,
|
| 6 |
+
time,
|
| 7 |
+
timedelta,
|
| 8 |
+
)
|
| 9 |
+
from itertools import (
|
| 10 |
+
product,
|
| 11 |
+
starmap,
|
| 12 |
+
)
|
| 13 |
+
import operator
|
| 14 |
+
|
| 15 |
+
import numpy as np
|
| 16 |
+
import pytest
|
| 17 |
+
import pytz
|
| 18 |
+
|
| 19 |
+
from pandas._libs.tslibs.conversion import localize_pydatetime
|
| 20 |
+
from pandas._libs.tslibs.offsets import shift_months
|
| 21 |
+
from pandas.errors import PerformanceWarning
|
| 22 |
+
|
| 23 |
+
import pandas as pd
|
| 24 |
+
from pandas import (
|
| 25 |
+
DateOffset,
|
| 26 |
+
DatetimeIndex,
|
| 27 |
+
NaT,
|
| 28 |
+
Period,
|
| 29 |
+
Series,
|
| 30 |
+
Timedelta,
|
| 31 |
+
TimedeltaIndex,
|
| 32 |
+
Timestamp,
|
| 33 |
+
date_range,
|
| 34 |
+
)
|
| 35 |
+
import pandas._testing as tm
|
| 36 |
+
from pandas.core import roperator
|
| 37 |
+
from pandas.tests.arithmetic.common import (
|
| 38 |
+
assert_cannot_add,
|
| 39 |
+
assert_invalid_addsub_type,
|
| 40 |
+
assert_invalid_comparison,
|
| 41 |
+
get_upcast_box,
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
# ------------------------------------------------------------------
|
| 45 |
+
# Comparisons
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class TestDatetime64ArrayLikeComparisons:
|
| 49 |
+
# Comparison tests for datetime64 vectors fully parametrized over
|
| 50 |
+
# DataFrame/Series/DatetimeIndex/DatetimeArray. Ideally all comparison
|
| 51 |
+
# tests will eventually end up here.
|
| 52 |
+
|
| 53 |
+
def test_compare_zerodim(self, tz_naive_fixture, box_with_array):
|
| 54 |
+
# Test comparison with zero-dimensional array is unboxed
|
| 55 |
+
tz = tz_naive_fixture
|
| 56 |
+
box = box_with_array
|
| 57 |
+
dti = date_range("20130101", periods=3, tz=tz)
|
| 58 |
+
|
| 59 |
+
other = np.array(dti.to_numpy()[0])
|
| 60 |
+
|
| 61 |
+
dtarr = tm.box_expected(dti, box)
|
| 62 |
+
xbox = get_upcast_box(dtarr, other, True)
|
| 63 |
+
result = dtarr <= other
|
| 64 |
+
expected = np.array([True, False, False])
|
| 65 |
+
expected = tm.box_expected(expected, xbox)
|
| 66 |
+
tm.assert_equal(result, expected)
|
| 67 |
+
|
| 68 |
+
@pytest.mark.parametrize(
|
| 69 |
+
"other",
|
| 70 |
+
[
|
| 71 |
+
"foo",
|
| 72 |
+
-1,
|
| 73 |
+
99,
|
| 74 |
+
4.0,
|
| 75 |
+
object(),
|
| 76 |
+
timedelta(days=2),
|
| 77 |
+
# GH#19800, GH#19301 datetime.date comparison raises to
|
| 78 |
+
# match DatetimeIndex/Timestamp. This also matches the behavior
|
| 79 |
+
# of stdlib datetime.datetime
|
| 80 |
+
datetime(2001, 1, 1).date(),
|
| 81 |
+
# GH#19301 None and NaN are *not* cast to NaT for comparisons
|
| 82 |
+
None,
|
| 83 |
+
np.nan,
|
| 84 |
+
],
|
| 85 |
+
)
|
| 86 |
+
def test_dt64arr_cmp_scalar_invalid(self, other, tz_naive_fixture, box_with_array):
|
| 87 |
+
# GH#22074, GH#15966
|
| 88 |
+
tz = tz_naive_fixture
|
| 89 |
+
|
| 90 |
+
rng = date_range("1/1/2000", periods=10, tz=tz)
|
| 91 |
+
dtarr = tm.box_expected(rng, box_with_array)
|
| 92 |
+
assert_invalid_comparison(dtarr, other, box_with_array)
|
| 93 |
+
|
| 94 |
+
@pytest.mark.parametrize(
|
| 95 |
+
"other",
|
| 96 |
+
[
|
| 97 |
+
# GH#4968 invalid date/int comparisons
|
| 98 |
+
list(range(10)),
|
| 99 |
+
np.arange(10),
|
| 100 |
+
np.arange(10).astype(np.float32),
|
| 101 |
+
np.arange(10).astype(object),
|
| 102 |
+
pd.timedelta_range("1ns", periods=10).array,
|
| 103 |
+
np.array(pd.timedelta_range("1ns", periods=10)),
|
| 104 |
+
list(pd.timedelta_range("1ns", periods=10)),
|
| 105 |
+
pd.timedelta_range("1 Day", periods=10).astype(object),
|
| 106 |
+
pd.period_range("1971-01-01", freq="D", periods=10).array,
|
| 107 |
+
pd.period_range("1971-01-01", freq="D", periods=10).astype(object),
|
| 108 |
+
],
|
| 109 |
+
)
|
| 110 |
+
def test_dt64arr_cmp_arraylike_invalid(
|
| 111 |
+
self, other, tz_naive_fixture, box_with_array
|
| 112 |
+
):
|
| 113 |
+
tz = tz_naive_fixture
|
| 114 |
+
|
| 115 |
+
dta = date_range("1970-01-01", freq="ns", periods=10, tz=tz)._data
|
| 116 |
+
obj = tm.box_expected(dta, box_with_array)
|
| 117 |
+
assert_invalid_comparison(obj, other, box_with_array)
|
| 118 |
+
|
| 119 |
+
def test_dt64arr_cmp_mixed_invalid(self, tz_naive_fixture):
|
| 120 |
+
tz = tz_naive_fixture
|
| 121 |
+
|
| 122 |
+
dta = date_range("1970-01-01", freq="h", periods=5, tz=tz)._data
|
| 123 |
+
|
| 124 |
+
other = np.array([0, 1, 2, dta[3], Timedelta(days=1)])
|
| 125 |
+
result = dta == other
|
| 126 |
+
expected = np.array([False, False, False, True, False])
|
| 127 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 128 |
+
|
| 129 |
+
result = dta != other
|
| 130 |
+
tm.assert_numpy_array_equal(result, ~expected)
|
| 131 |
+
|
| 132 |
+
msg = "Invalid comparison between|Cannot compare type|not supported between"
|
| 133 |
+
with pytest.raises(TypeError, match=msg):
|
| 134 |
+
dta < other
|
| 135 |
+
with pytest.raises(TypeError, match=msg):
|
| 136 |
+
dta > other
|
| 137 |
+
with pytest.raises(TypeError, match=msg):
|
| 138 |
+
dta <= other
|
| 139 |
+
with pytest.raises(TypeError, match=msg):
|
| 140 |
+
dta >= other
|
| 141 |
+
|
| 142 |
+
def test_dt64arr_nat_comparison(self, tz_naive_fixture, box_with_array):
|
| 143 |
+
# GH#22242, GH#22163 DataFrame considered NaT == ts incorrectly
|
| 144 |
+
tz = tz_naive_fixture
|
| 145 |
+
box = box_with_array
|
| 146 |
+
|
| 147 |
+
ts = Timestamp("2021-01-01", tz=tz)
|
| 148 |
+
ser = Series([ts, NaT])
|
| 149 |
+
|
| 150 |
+
obj = tm.box_expected(ser, box)
|
| 151 |
+
xbox = get_upcast_box(obj, ts, True)
|
| 152 |
+
|
| 153 |
+
expected = Series([True, False], dtype=np.bool_)
|
| 154 |
+
expected = tm.box_expected(expected, xbox)
|
| 155 |
+
|
| 156 |
+
result = obj == ts
|
| 157 |
+
tm.assert_equal(result, expected)
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
class TestDatetime64SeriesComparison:
|
| 161 |
+
# TODO: moved from tests.series.test_operators; needs cleanup
|
| 162 |
+
|
| 163 |
+
@pytest.mark.parametrize(
|
| 164 |
+
"pair",
|
| 165 |
+
[
|
| 166 |
+
(
|
| 167 |
+
[Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
|
| 168 |
+
[NaT, NaT, Timestamp("2011-01-03")],
|
| 169 |
+
),
|
| 170 |
+
(
|
| 171 |
+
[Timedelta("1 days"), NaT, Timedelta("3 days")],
|
| 172 |
+
[NaT, NaT, Timedelta("3 days")],
|
| 173 |
+
),
|
| 174 |
+
(
|
| 175 |
+
[Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
|
| 176 |
+
[NaT, NaT, Period("2011-03", freq="M")],
|
| 177 |
+
),
|
| 178 |
+
],
|
| 179 |
+
)
|
| 180 |
+
@pytest.mark.parametrize("reverse", [True, False])
|
| 181 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 182 |
+
@pytest.mark.parametrize(
|
| 183 |
+
"op, expected",
|
| 184 |
+
[
|
| 185 |
+
(operator.eq, Series([False, False, True])),
|
| 186 |
+
(operator.ne, Series([True, True, False])),
|
| 187 |
+
(operator.lt, Series([False, False, False])),
|
| 188 |
+
(operator.gt, Series([False, False, False])),
|
| 189 |
+
(operator.ge, Series([False, False, True])),
|
| 190 |
+
(operator.le, Series([False, False, True])),
|
| 191 |
+
],
|
| 192 |
+
)
|
| 193 |
+
def test_nat_comparisons(
|
| 194 |
+
self,
|
| 195 |
+
dtype,
|
| 196 |
+
index_or_series,
|
| 197 |
+
reverse,
|
| 198 |
+
pair,
|
| 199 |
+
op,
|
| 200 |
+
expected,
|
| 201 |
+
):
|
| 202 |
+
box = index_or_series
|
| 203 |
+
lhs, rhs = pair
|
| 204 |
+
if reverse:
|
| 205 |
+
# add lhs / rhs switched data
|
| 206 |
+
lhs, rhs = rhs, lhs
|
| 207 |
+
|
| 208 |
+
left = Series(lhs, dtype=dtype)
|
| 209 |
+
right = box(rhs, dtype=dtype)
|
| 210 |
+
|
| 211 |
+
result = op(left, right)
|
| 212 |
+
|
| 213 |
+
tm.assert_series_equal(result, expected)
|
| 214 |
+
|
| 215 |
+
@pytest.mark.parametrize(
|
| 216 |
+
"data",
|
| 217 |
+
[
|
| 218 |
+
[Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
|
| 219 |
+
[Timedelta("1 days"), NaT, Timedelta("3 days")],
|
| 220 |
+
[Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
|
| 221 |
+
],
|
| 222 |
+
)
|
| 223 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 224 |
+
def test_nat_comparisons_scalar(self, dtype, data, box_with_array):
|
| 225 |
+
box = box_with_array
|
| 226 |
+
|
| 227 |
+
left = Series(data, dtype=dtype)
|
| 228 |
+
left = tm.box_expected(left, box)
|
| 229 |
+
xbox = get_upcast_box(left, NaT, True)
|
| 230 |
+
|
| 231 |
+
expected = [False, False, False]
|
| 232 |
+
expected = tm.box_expected(expected, xbox)
|
| 233 |
+
if box is pd.array and dtype is object:
|
| 234 |
+
expected = pd.array(expected, dtype="bool")
|
| 235 |
+
|
| 236 |
+
tm.assert_equal(left == NaT, expected)
|
| 237 |
+
tm.assert_equal(NaT == left, expected)
|
| 238 |
+
|
| 239 |
+
expected = [True, True, True]
|
| 240 |
+
expected = tm.box_expected(expected, xbox)
|
| 241 |
+
if box is pd.array and dtype is object:
|
| 242 |
+
expected = pd.array(expected, dtype="bool")
|
| 243 |
+
tm.assert_equal(left != NaT, expected)
|
| 244 |
+
tm.assert_equal(NaT != left, expected)
|
| 245 |
+
|
| 246 |
+
expected = [False, False, False]
|
| 247 |
+
expected = tm.box_expected(expected, xbox)
|
| 248 |
+
if box is pd.array and dtype is object:
|
| 249 |
+
expected = pd.array(expected, dtype="bool")
|
| 250 |
+
tm.assert_equal(left < NaT, expected)
|
| 251 |
+
tm.assert_equal(NaT > left, expected)
|
| 252 |
+
tm.assert_equal(left <= NaT, expected)
|
| 253 |
+
tm.assert_equal(NaT >= left, expected)
|
| 254 |
+
|
| 255 |
+
tm.assert_equal(left > NaT, expected)
|
| 256 |
+
tm.assert_equal(NaT < left, expected)
|
| 257 |
+
tm.assert_equal(left >= NaT, expected)
|
| 258 |
+
tm.assert_equal(NaT <= left, expected)
|
| 259 |
+
|
| 260 |
+
@pytest.mark.parametrize("val", [datetime(2000, 1, 4), datetime(2000, 1, 5)])
|
| 261 |
+
def test_series_comparison_scalars(self, val):
|
| 262 |
+
series = Series(date_range("1/1/2000", periods=10))
|
| 263 |
+
|
| 264 |
+
result = series > val
|
| 265 |
+
expected = Series([x > val for x in series])
|
| 266 |
+
tm.assert_series_equal(result, expected)
|
| 267 |
+
|
| 268 |
+
@pytest.mark.parametrize(
|
| 269 |
+
"left,right", [("lt", "gt"), ("le", "ge"), ("eq", "eq"), ("ne", "ne")]
|
| 270 |
+
)
|
| 271 |
+
def test_timestamp_compare_series(self, left, right):
|
| 272 |
+
# see gh-4982
|
| 273 |
+
# Make sure we can compare Timestamps on the right AND left hand side.
|
| 274 |
+
ser = Series(date_range("20010101", periods=10), name="dates")
|
| 275 |
+
s_nat = ser.copy(deep=True)
|
| 276 |
+
|
| 277 |
+
ser[0] = Timestamp("nat")
|
| 278 |
+
ser[3] = Timestamp("nat")
|
| 279 |
+
|
| 280 |
+
left_f = getattr(operator, left)
|
| 281 |
+
right_f = getattr(operator, right)
|
| 282 |
+
|
| 283 |
+
# No NaT
|
| 284 |
+
expected = left_f(ser, Timestamp("20010109"))
|
| 285 |
+
result = right_f(Timestamp("20010109"), ser)
|
| 286 |
+
tm.assert_series_equal(result, expected)
|
| 287 |
+
|
| 288 |
+
# NaT
|
| 289 |
+
expected = left_f(ser, Timestamp("nat"))
|
| 290 |
+
result = right_f(Timestamp("nat"), ser)
|
| 291 |
+
tm.assert_series_equal(result, expected)
|
| 292 |
+
|
| 293 |
+
# Compare to Timestamp with series containing NaT
|
| 294 |
+
expected = left_f(s_nat, Timestamp("20010109"))
|
| 295 |
+
result = right_f(Timestamp("20010109"), s_nat)
|
| 296 |
+
tm.assert_series_equal(result, expected)
|
| 297 |
+
|
| 298 |
+
# Compare to NaT with series containing NaT
|
| 299 |
+
expected = left_f(s_nat, NaT)
|
| 300 |
+
result = right_f(NaT, s_nat)
|
| 301 |
+
tm.assert_series_equal(result, expected)
|
| 302 |
+
|
| 303 |
+
def test_dt64arr_timestamp_equality(self, box_with_array):
|
| 304 |
+
# GH#11034
|
| 305 |
+
box = box_with_array
|
| 306 |
+
|
| 307 |
+
ser = Series([Timestamp("2000-01-29 01:59:00"), Timestamp("2000-01-30"), NaT])
|
| 308 |
+
ser = tm.box_expected(ser, box)
|
| 309 |
+
xbox = get_upcast_box(ser, ser, True)
|
| 310 |
+
|
| 311 |
+
result = ser != ser
|
| 312 |
+
expected = tm.box_expected([False, False, True], xbox)
|
| 313 |
+
tm.assert_equal(result, expected)
|
| 314 |
+
|
| 315 |
+
if box is pd.DataFrame:
|
| 316 |
+
# alignment for frame vs series comparisons deprecated
|
| 317 |
+
# in GH#46795 enforced 2.0
|
| 318 |
+
with pytest.raises(ValueError, match="not aligned"):
|
| 319 |
+
ser != ser[0]
|
| 320 |
+
|
| 321 |
+
else:
|
| 322 |
+
result = ser != ser[0]
|
| 323 |
+
expected = tm.box_expected([False, True, True], xbox)
|
| 324 |
+
tm.assert_equal(result, expected)
|
| 325 |
+
|
| 326 |
+
if box is pd.DataFrame:
|
| 327 |
+
# alignment for frame vs series comparisons deprecated
|
| 328 |
+
# in GH#46795 enforced 2.0
|
| 329 |
+
with pytest.raises(ValueError, match="not aligned"):
|
| 330 |
+
ser != ser[2]
|
| 331 |
+
else:
|
| 332 |
+
result = ser != ser[2]
|
| 333 |
+
expected = tm.box_expected([True, True, True], xbox)
|
| 334 |
+
tm.assert_equal(result, expected)
|
| 335 |
+
|
| 336 |
+
result = ser == ser
|
| 337 |
+
expected = tm.box_expected([True, True, False], xbox)
|
| 338 |
+
tm.assert_equal(result, expected)
|
| 339 |
+
|
| 340 |
+
if box is pd.DataFrame:
|
| 341 |
+
# alignment for frame vs series comparisons deprecated
|
| 342 |
+
# in GH#46795 enforced 2.0
|
| 343 |
+
with pytest.raises(ValueError, match="not aligned"):
|
| 344 |
+
ser == ser[0]
|
| 345 |
+
else:
|
| 346 |
+
result = ser == ser[0]
|
| 347 |
+
expected = tm.box_expected([True, False, False], xbox)
|
| 348 |
+
tm.assert_equal(result, expected)
|
| 349 |
+
|
| 350 |
+
if box is pd.DataFrame:
|
| 351 |
+
# alignment for frame vs series comparisons deprecated
|
| 352 |
+
# in GH#46795 enforced 2.0
|
| 353 |
+
with pytest.raises(ValueError, match="not aligned"):
|
| 354 |
+
ser == ser[2]
|
| 355 |
+
else:
|
| 356 |
+
result = ser == ser[2]
|
| 357 |
+
expected = tm.box_expected([False, False, False], xbox)
|
| 358 |
+
tm.assert_equal(result, expected)
|
| 359 |
+
|
| 360 |
+
@pytest.mark.parametrize(
|
| 361 |
+
"datetimelike",
|
| 362 |
+
[
|
| 363 |
+
Timestamp("20130101"),
|
| 364 |
+
datetime(2013, 1, 1),
|
| 365 |
+
np.datetime64("2013-01-01T00:00", "ns"),
|
| 366 |
+
],
|
| 367 |
+
)
|
| 368 |
+
@pytest.mark.parametrize(
|
| 369 |
+
"op,expected",
|
| 370 |
+
[
|
| 371 |
+
(operator.lt, [True, False, False, False]),
|
| 372 |
+
(operator.le, [True, True, False, False]),
|
| 373 |
+
(operator.eq, [False, True, False, False]),
|
| 374 |
+
(operator.gt, [False, False, False, True]),
|
| 375 |
+
],
|
| 376 |
+
)
|
| 377 |
+
def test_dt64_compare_datetime_scalar(self, datetimelike, op, expected):
|
| 378 |
+
# GH#17965, test for ability to compare datetime64[ns] columns
|
| 379 |
+
# to datetimelike
|
| 380 |
+
ser = Series(
|
| 381 |
+
[
|
| 382 |
+
Timestamp("20120101"),
|
| 383 |
+
Timestamp("20130101"),
|
| 384 |
+
np.nan,
|
| 385 |
+
Timestamp("20130103"),
|
| 386 |
+
],
|
| 387 |
+
name="A",
|
| 388 |
+
)
|
| 389 |
+
result = op(ser, datetimelike)
|
| 390 |
+
expected = Series(expected, name="A")
|
| 391 |
+
tm.assert_series_equal(result, expected)
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
class TestDatetimeIndexComparisons:
|
| 395 |
+
# TODO: moved from tests.indexes.test_base; parametrize and de-duplicate
|
| 396 |
+
def test_comparators(self, comparison_op):
|
| 397 |
+
index = date_range("2020-01-01", periods=10)
|
| 398 |
+
element = index[len(index) // 2]
|
| 399 |
+
element = Timestamp(element).to_datetime64()
|
| 400 |
+
|
| 401 |
+
arr = np.array(index)
|
| 402 |
+
arr_result = comparison_op(arr, element)
|
| 403 |
+
index_result = comparison_op(index, element)
|
| 404 |
+
|
| 405 |
+
assert isinstance(index_result, np.ndarray)
|
| 406 |
+
tm.assert_numpy_array_equal(arr_result, index_result)
|
| 407 |
+
|
| 408 |
+
@pytest.mark.parametrize(
|
| 409 |
+
"other",
|
| 410 |
+
[datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
|
| 411 |
+
)
|
| 412 |
+
def test_dti_cmp_datetimelike(self, other, tz_naive_fixture):
|
| 413 |
+
tz = tz_naive_fixture
|
| 414 |
+
dti = date_range("2016-01-01", periods=2, tz=tz)
|
| 415 |
+
if tz is not None:
|
| 416 |
+
if isinstance(other, np.datetime64):
|
| 417 |
+
pytest.skip(f"{type(other).__name__} is not tz aware")
|
| 418 |
+
other = localize_pydatetime(other, dti.tzinfo)
|
| 419 |
+
|
| 420 |
+
result = dti == other
|
| 421 |
+
expected = np.array([True, False])
|
| 422 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 423 |
+
|
| 424 |
+
result = dti > other
|
| 425 |
+
expected = np.array([False, True])
|
| 426 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 427 |
+
|
| 428 |
+
result = dti >= other
|
| 429 |
+
expected = np.array([True, True])
|
| 430 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 431 |
+
|
| 432 |
+
result = dti < other
|
| 433 |
+
expected = np.array([False, False])
|
| 434 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 435 |
+
|
| 436 |
+
result = dti <= other
|
| 437 |
+
expected = np.array([True, False])
|
| 438 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 439 |
+
|
| 440 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 441 |
+
def test_dti_cmp_nat(self, dtype, box_with_array):
|
| 442 |
+
left = DatetimeIndex([Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")])
|
| 443 |
+
right = DatetimeIndex([NaT, NaT, Timestamp("2011-01-03")])
|
| 444 |
+
|
| 445 |
+
left = tm.box_expected(left, box_with_array)
|
| 446 |
+
right = tm.box_expected(right, box_with_array)
|
| 447 |
+
xbox = get_upcast_box(left, right, True)
|
| 448 |
+
|
| 449 |
+
lhs, rhs = left, right
|
| 450 |
+
if dtype is object:
|
| 451 |
+
lhs, rhs = left.astype(object), right.astype(object)
|
| 452 |
+
|
| 453 |
+
result = rhs == lhs
|
| 454 |
+
expected = np.array([False, False, True])
|
| 455 |
+
expected = tm.box_expected(expected, xbox)
|
| 456 |
+
tm.assert_equal(result, expected)
|
| 457 |
+
|
| 458 |
+
result = lhs != rhs
|
| 459 |
+
expected = np.array([True, True, False])
|
| 460 |
+
expected = tm.box_expected(expected, xbox)
|
| 461 |
+
tm.assert_equal(result, expected)
|
| 462 |
+
|
| 463 |
+
expected = np.array([False, False, False])
|
| 464 |
+
expected = tm.box_expected(expected, xbox)
|
| 465 |
+
tm.assert_equal(lhs == NaT, expected)
|
| 466 |
+
tm.assert_equal(NaT == rhs, expected)
|
| 467 |
+
|
| 468 |
+
expected = np.array([True, True, True])
|
| 469 |
+
expected = tm.box_expected(expected, xbox)
|
| 470 |
+
tm.assert_equal(lhs != NaT, expected)
|
| 471 |
+
tm.assert_equal(NaT != lhs, expected)
|
| 472 |
+
|
| 473 |
+
expected = np.array([False, False, False])
|
| 474 |
+
expected = tm.box_expected(expected, xbox)
|
| 475 |
+
tm.assert_equal(lhs < NaT, expected)
|
| 476 |
+
tm.assert_equal(NaT > lhs, expected)
|
| 477 |
+
|
| 478 |
+
def test_dti_cmp_nat_behaves_like_float_cmp_nan(self):
|
| 479 |
+
fidx1 = pd.Index([1.0, np.nan, 3.0, np.nan, 5.0, 7.0])
|
| 480 |
+
fidx2 = pd.Index([2.0, 3.0, np.nan, np.nan, 6.0, 7.0])
|
| 481 |
+
|
| 482 |
+
didx1 = DatetimeIndex(
|
| 483 |
+
["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
|
| 484 |
+
)
|
| 485 |
+
didx2 = DatetimeIndex(
|
| 486 |
+
["2014-02-01", "2014-03-01", NaT, NaT, "2014-06-01", "2014-07-01"]
|
| 487 |
+
)
|
| 488 |
+
darr = np.array(
|
| 489 |
+
[
|
| 490 |
+
np.datetime64("2014-02-01 00:00"),
|
| 491 |
+
np.datetime64("2014-03-01 00:00"),
|
| 492 |
+
np.datetime64("nat"),
|
| 493 |
+
np.datetime64("nat"),
|
| 494 |
+
np.datetime64("2014-06-01 00:00"),
|
| 495 |
+
np.datetime64("2014-07-01 00:00"),
|
| 496 |
+
]
|
| 497 |
+
)
|
| 498 |
+
|
| 499 |
+
cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]
|
| 500 |
+
|
| 501 |
+
# Check pd.NaT is handles as the same as np.nan
|
| 502 |
+
with tm.assert_produces_warning(None):
|
| 503 |
+
for idx1, idx2 in cases:
|
| 504 |
+
result = idx1 < idx2
|
| 505 |
+
expected = np.array([True, False, False, False, True, False])
|
| 506 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 507 |
+
|
| 508 |
+
result = idx2 > idx1
|
| 509 |
+
expected = np.array([True, False, False, False, True, False])
|
| 510 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 511 |
+
|
| 512 |
+
result = idx1 <= idx2
|
| 513 |
+
expected = np.array([True, False, False, False, True, True])
|
| 514 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 515 |
+
|
| 516 |
+
result = idx2 >= idx1
|
| 517 |
+
expected = np.array([True, False, False, False, True, True])
|
| 518 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 519 |
+
|
| 520 |
+
result = idx1 == idx2
|
| 521 |
+
expected = np.array([False, False, False, False, False, True])
|
| 522 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 523 |
+
|
| 524 |
+
result = idx1 != idx2
|
| 525 |
+
expected = np.array([True, True, True, True, True, False])
|
| 526 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 527 |
+
|
| 528 |
+
with tm.assert_produces_warning(None):
|
| 529 |
+
for idx1, val in [(fidx1, np.nan), (didx1, NaT)]:
|
| 530 |
+
result = idx1 < val
|
| 531 |
+
expected = np.array([False, False, False, False, False, False])
|
| 532 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 533 |
+
result = idx1 > val
|
| 534 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 535 |
+
|
| 536 |
+
result = idx1 <= val
|
| 537 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 538 |
+
result = idx1 >= val
|
| 539 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 540 |
+
|
| 541 |
+
result = idx1 == val
|
| 542 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 543 |
+
|
| 544 |
+
result = idx1 != val
|
| 545 |
+
expected = np.array([True, True, True, True, True, True])
|
| 546 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 547 |
+
|
| 548 |
+
# Check pd.NaT is handles as the same as np.nan
|
| 549 |
+
with tm.assert_produces_warning(None):
|
| 550 |
+
for idx1, val in [(fidx1, 3), (didx1, datetime(2014, 3, 1))]:
|
| 551 |
+
result = idx1 < val
|
| 552 |
+
expected = np.array([True, False, False, False, False, False])
|
| 553 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 554 |
+
result = idx1 > val
|
| 555 |
+
expected = np.array([False, False, False, False, True, True])
|
| 556 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 557 |
+
|
| 558 |
+
result = idx1 <= val
|
| 559 |
+
expected = np.array([True, False, True, False, False, False])
|
| 560 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 561 |
+
result = idx1 >= val
|
| 562 |
+
expected = np.array([False, False, True, False, True, True])
|
| 563 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 564 |
+
|
| 565 |
+
result = idx1 == val
|
| 566 |
+
expected = np.array([False, False, True, False, False, False])
|
| 567 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 568 |
+
|
| 569 |
+
result = idx1 != val
|
| 570 |
+
expected = np.array([True, True, False, True, True, True])
|
| 571 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 572 |
+
|
| 573 |
+
def test_comparison_tzawareness_compat(self, comparison_op, box_with_array):
|
| 574 |
+
# GH#18162
|
| 575 |
+
op = comparison_op
|
| 576 |
+
box = box_with_array
|
| 577 |
+
|
| 578 |
+
dr = date_range("2016-01-01", periods=6)
|
| 579 |
+
dz = dr.tz_localize("US/Pacific")
|
| 580 |
+
|
| 581 |
+
dr = tm.box_expected(dr, box)
|
| 582 |
+
dz = tm.box_expected(dz, box)
|
| 583 |
+
|
| 584 |
+
if box is pd.DataFrame:
|
| 585 |
+
tolist = lambda x: x.astype(object).values.tolist()[0]
|
| 586 |
+
else:
|
| 587 |
+
tolist = list
|
| 588 |
+
|
| 589 |
+
if op not in [operator.eq, operator.ne]:
|
| 590 |
+
msg = (
|
| 591 |
+
r"Invalid comparison between dtype=datetime64\[ns.*\] "
|
| 592 |
+
"and (Timestamp|DatetimeArray|list|ndarray)"
|
| 593 |
+
)
|
| 594 |
+
with pytest.raises(TypeError, match=msg):
|
| 595 |
+
op(dr, dz)
|
| 596 |
+
|
| 597 |
+
with pytest.raises(TypeError, match=msg):
|
| 598 |
+
op(dr, tolist(dz))
|
| 599 |
+
with pytest.raises(TypeError, match=msg):
|
| 600 |
+
op(dr, np.array(tolist(dz), dtype=object))
|
| 601 |
+
with pytest.raises(TypeError, match=msg):
|
| 602 |
+
op(dz, dr)
|
| 603 |
+
|
| 604 |
+
with pytest.raises(TypeError, match=msg):
|
| 605 |
+
op(dz, tolist(dr))
|
| 606 |
+
with pytest.raises(TypeError, match=msg):
|
| 607 |
+
op(dz, np.array(tolist(dr), dtype=object))
|
| 608 |
+
|
| 609 |
+
# The aware==aware and naive==naive comparisons should *not* raise
|
| 610 |
+
assert np.all(dr == dr)
|
| 611 |
+
assert np.all(dr == tolist(dr))
|
| 612 |
+
assert np.all(tolist(dr) == dr)
|
| 613 |
+
assert np.all(np.array(tolist(dr), dtype=object) == dr)
|
| 614 |
+
assert np.all(dr == np.array(tolist(dr), dtype=object))
|
| 615 |
+
|
| 616 |
+
assert np.all(dz == dz)
|
| 617 |
+
assert np.all(dz == tolist(dz))
|
| 618 |
+
assert np.all(tolist(dz) == dz)
|
| 619 |
+
assert np.all(np.array(tolist(dz), dtype=object) == dz)
|
| 620 |
+
assert np.all(dz == np.array(tolist(dz), dtype=object))
|
| 621 |
+
|
| 622 |
+
def test_comparison_tzawareness_compat_scalars(self, comparison_op, box_with_array):
|
| 623 |
+
# GH#18162
|
| 624 |
+
op = comparison_op
|
| 625 |
+
|
| 626 |
+
dr = date_range("2016-01-01", periods=6)
|
| 627 |
+
dz = dr.tz_localize("US/Pacific")
|
| 628 |
+
|
| 629 |
+
dr = tm.box_expected(dr, box_with_array)
|
| 630 |
+
dz = tm.box_expected(dz, box_with_array)
|
| 631 |
+
|
| 632 |
+
# Check comparisons against scalar Timestamps
|
| 633 |
+
ts = Timestamp("2000-03-14 01:59")
|
| 634 |
+
ts_tz = Timestamp("2000-03-14 01:59", tz="Europe/Amsterdam")
|
| 635 |
+
|
| 636 |
+
assert np.all(dr > ts)
|
| 637 |
+
msg = r"Invalid comparison between dtype=datetime64\[ns.*\] and Timestamp"
|
| 638 |
+
if op not in [operator.eq, operator.ne]:
|
| 639 |
+
with pytest.raises(TypeError, match=msg):
|
| 640 |
+
op(dr, ts_tz)
|
| 641 |
+
|
| 642 |
+
assert np.all(dz > ts_tz)
|
| 643 |
+
if op not in [operator.eq, operator.ne]:
|
| 644 |
+
with pytest.raises(TypeError, match=msg):
|
| 645 |
+
op(dz, ts)
|
| 646 |
+
|
| 647 |
+
if op not in [operator.eq, operator.ne]:
|
| 648 |
+
# GH#12601: Check comparison against Timestamps and DatetimeIndex
|
| 649 |
+
with pytest.raises(TypeError, match=msg):
|
| 650 |
+
op(ts, dz)
|
| 651 |
+
|
| 652 |
+
@pytest.mark.parametrize(
|
| 653 |
+
"other",
|
| 654 |
+
[datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
|
| 655 |
+
)
|
| 656 |
+
# Bug in NumPy? https://github.com/numpy/numpy/issues/13841
|
| 657 |
+
# Raising in __eq__ will fallback to NumPy, which warns, fails,
|
| 658 |
+
# then re-raises the original exception. So we just need to ignore.
|
| 659 |
+
@pytest.mark.filterwarnings("ignore:elementwise comp:DeprecationWarning")
|
| 660 |
+
def test_scalar_comparison_tzawareness(
|
| 661 |
+
self, comparison_op, other, tz_aware_fixture, box_with_array
|
| 662 |
+
):
|
| 663 |
+
op = comparison_op
|
| 664 |
+
tz = tz_aware_fixture
|
| 665 |
+
dti = date_range("2016-01-01", periods=2, tz=tz)
|
| 666 |
+
|
| 667 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 668 |
+
xbox = get_upcast_box(dtarr, other, True)
|
| 669 |
+
if op in [operator.eq, operator.ne]:
|
| 670 |
+
exbool = op is operator.ne
|
| 671 |
+
expected = np.array([exbool, exbool], dtype=bool)
|
| 672 |
+
expected = tm.box_expected(expected, xbox)
|
| 673 |
+
|
| 674 |
+
result = op(dtarr, other)
|
| 675 |
+
tm.assert_equal(result, expected)
|
| 676 |
+
|
| 677 |
+
result = op(other, dtarr)
|
| 678 |
+
tm.assert_equal(result, expected)
|
| 679 |
+
else:
|
| 680 |
+
msg = (
|
| 681 |
+
r"Invalid comparison between dtype=datetime64\[ns, .*\] "
|
| 682 |
+
f"and {type(other).__name__}"
|
| 683 |
+
)
|
| 684 |
+
with pytest.raises(TypeError, match=msg):
|
| 685 |
+
op(dtarr, other)
|
| 686 |
+
with pytest.raises(TypeError, match=msg):
|
| 687 |
+
op(other, dtarr)
|
| 688 |
+
|
| 689 |
+
def test_nat_comparison_tzawareness(self, comparison_op):
|
| 690 |
+
# GH#19276
|
| 691 |
+
# tzaware DatetimeIndex should not raise when compared to NaT
|
| 692 |
+
op = comparison_op
|
| 693 |
+
|
| 694 |
+
dti = DatetimeIndex(
|
| 695 |
+
["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
|
| 696 |
+
)
|
| 697 |
+
expected = np.array([op == operator.ne] * len(dti))
|
| 698 |
+
result = op(dti, NaT)
|
| 699 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 700 |
+
|
| 701 |
+
result = op(dti.tz_localize("US/Pacific"), NaT)
|
| 702 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 703 |
+
|
| 704 |
+
def test_dti_cmp_str(self, tz_naive_fixture):
|
| 705 |
+
# GH#22074
|
| 706 |
+
# regardless of tz, we expect these comparisons are valid
|
| 707 |
+
tz = tz_naive_fixture
|
| 708 |
+
rng = date_range("1/1/2000", periods=10, tz=tz)
|
| 709 |
+
other = "1/1/2000"
|
| 710 |
+
|
| 711 |
+
result = rng == other
|
| 712 |
+
expected = np.array([True] + [False] * 9)
|
| 713 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 714 |
+
|
| 715 |
+
result = rng != other
|
| 716 |
+
expected = np.array([False] + [True] * 9)
|
| 717 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 718 |
+
|
| 719 |
+
result = rng < other
|
| 720 |
+
expected = np.array([False] * 10)
|
| 721 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 722 |
+
|
| 723 |
+
result = rng <= other
|
| 724 |
+
expected = np.array([True] + [False] * 9)
|
| 725 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 726 |
+
|
| 727 |
+
result = rng > other
|
| 728 |
+
expected = np.array([False] + [True] * 9)
|
| 729 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 730 |
+
|
| 731 |
+
result = rng >= other
|
| 732 |
+
expected = np.array([True] * 10)
|
| 733 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 734 |
+
|
| 735 |
+
def test_dti_cmp_list(self):
|
| 736 |
+
rng = date_range("1/1/2000", periods=10)
|
| 737 |
+
|
| 738 |
+
result = rng == list(rng)
|
| 739 |
+
expected = rng == rng
|
| 740 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 741 |
+
|
| 742 |
+
@pytest.mark.parametrize(
|
| 743 |
+
"other",
|
| 744 |
+
[
|
| 745 |
+
pd.timedelta_range("1D", periods=10),
|
| 746 |
+
pd.timedelta_range("1D", periods=10).to_series(),
|
| 747 |
+
pd.timedelta_range("1D", periods=10).asi8.view("m8[ns]"),
|
| 748 |
+
],
|
| 749 |
+
ids=lambda x: type(x).__name__,
|
| 750 |
+
)
|
| 751 |
+
def test_dti_cmp_tdi_tzawareness(self, other):
|
| 752 |
+
# GH#22074
|
| 753 |
+
# reversion test that we _don't_ call _assert_tzawareness_compat
|
| 754 |
+
# when comparing against TimedeltaIndex
|
| 755 |
+
dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
|
| 756 |
+
|
| 757 |
+
result = dti == other
|
| 758 |
+
expected = np.array([False] * 10)
|
| 759 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 760 |
+
|
| 761 |
+
result = dti != other
|
| 762 |
+
expected = np.array([True] * 10)
|
| 763 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 764 |
+
msg = "Invalid comparison between"
|
| 765 |
+
with pytest.raises(TypeError, match=msg):
|
| 766 |
+
dti < other
|
| 767 |
+
with pytest.raises(TypeError, match=msg):
|
| 768 |
+
dti <= other
|
| 769 |
+
with pytest.raises(TypeError, match=msg):
|
| 770 |
+
dti > other
|
| 771 |
+
with pytest.raises(TypeError, match=msg):
|
| 772 |
+
dti >= other
|
| 773 |
+
|
| 774 |
+
def test_dti_cmp_object_dtype(self):
|
| 775 |
+
# GH#22074
|
| 776 |
+
dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
|
| 777 |
+
|
| 778 |
+
other = dti.astype("O")
|
| 779 |
+
|
| 780 |
+
result = dti == other
|
| 781 |
+
expected = np.array([True] * 10)
|
| 782 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 783 |
+
|
| 784 |
+
other = dti.tz_localize(None)
|
| 785 |
+
result = dti != other
|
| 786 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 787 |
+
|
| 788 |
+
other = np.array(list(dti[:5]) + [Timedelta(days=1)] * 5)
|
| 789 |
+
result = dti == other
|
| 790 |
+
expected = np.array([True] * 5 + [False] * 5)
|
| 791 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 792 |
+
msg = ">=' not supported between instances of 'Timestamp' and 'Timedelta'"
|
| 793 |
+
with pytest.raises(TypeError, match=msg):
|
| 794 |
+
dti >= other
|
| 795 |
+
|
| 796 |
+
|
| 797 |
+
# ------------------------------------------------------------------
|
| 798 |
+
# Arithmetic
|
| 799 |
+
|
| 800 |
+
|
| 801 |
+
class TestDatetime64Arithmetic:
|
| 802 |
+
# This class is intended for "finished" tests that are fully parametrized
|
| 803 |
+
# over DataFrame/Series/Index/DatetimeArray
|
| 804 |
+
|
| 805 |
+
# -------------------------------------------------------------
|
| 806 |
+
# Addition/Subtraction of timedelta-like
|
| 807 |
+
|
| 808 |
+
@pytest.mark.arm_slow
|
| 809 |
+
def test_dt64arr_add_timedeltalike_scalar(
|
| 810 |
+
self, tz_naive_fixture, two_hours, box_with_array
|
| 811 |
+
):
|
| 812 |
+
# GH#22005, GH#22163 check DataFrame doesn't raise TypeError
|
| 813 |
+
tz = tz_naive_fixture
|
| 814 |
+
|
| 815 |
+
rng = date_range("2000-01-01", "2000-02-01", tz=tz)
|
| 816 |
+
expected = date_range("2000-01-01 02:00", "2000-02-01 02:00", tz=tz)
|
| 817 |
+
|
| 818 |
+
rng = tm.box_expected(rng, box_with_array)
|
| 819 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 820 |
+
|
| 821 |
+
result = rng + two_hours
|
| 822 |
+
tm.assert_equal(result, expected)
|
| 823 |
+
|
| 824 |
+
result = two_hours + rng
|
| 825 |
+
tm.assert_equal(result, expected)
|
| 826 |
+
|
| 827 |
+
rng += two_hours
|
| 828 |
+
tm.assert_equal(rng, expected)
|
| 829 |
+
|
| 830 |
+
def test_dt64arr_sub_timedeltalike_scalar(
|
| 831 |
+
self, tz_naive_fixture, two_hours, box_with_array
|
| 832 |
+
):
|
| 833 |
+
tz = tz_naive_fixture
|
| 834 |
+
|
| 835 |
+
rng = date_range("2000-01-01", "2000-02-01", tz=tz)
|
| 836 |
+
expected = date_range("1999-12-31 22:00", "2000-01-31 22:00", tz=tz)
|
| 837 |
+
|
| 838 |
+
rng = tm.box_expected(rng, box_with_array)
|
| 839 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 840 |
+
|
| 841 |
+
result = rng - two_hours
|
| 842 |
+
tm.assert_equal(result, expected)
|
| 843 |
+
|
| 844 |
+
rng -= two_hours
|
| 845 |
+
tm.assert_equal(rng, expected)
|
| 846 |
+
|
| 847 |
+
def test_dt64_array_sub_dt_with_different_timezone(self, box_with_array):
|
| 848 |
+
t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
|
| 849 |
+
t1 = tm.box_expected(t1, box_with_array)
|
| 850 |
+
t2 = Timestamp("20130101").tz_localize("CET")
|
| 851 |
+
tnaive = Timestamp(20130101)
|
| 852 |
+
|
| 853 |
+
result = t1 - t2
|
| 854 |
+
expected = TimedeltaIndex(
|
| 855 |
+
["0 days 06:00:00", "1 days 06:00:00", "2 days 06:00:00"]
|
| 856 |
+
)
|
| 857 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 858 |
+
tm.assert_equal(result, expected)
|
| 859 |
+
|
| 860 |
+
result = t2 - t1
|
| 861 |
+
expected = TimedeltaIndex(
|
| 862 |
+
["-1 days +18:00:00", "-2 days +18:00:00", "-3 days +18:00:00"]
|
| 863 |
+
)
|
| 864 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 865 |
+
tm.assert_equal(result, expected)
|
| 866 |
+
|
| 867 |
+
msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
|
| 868 |
+
with pytest.raises(TypeError, match=msg):
|
| 869 |
+
t1 - tnaive
|
| 870 |
+
|
| 871 |
+
with pytest.raises(TypeError, match=msg):
|
| 872 |
+
tnaive - t1
|
| 873 |
+
|
| 874 |
+
def test_dt64_array_sub_dt64_array_with_different_timezone(self, box_with_array):
|
| 875 |
+
t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
|
| 876 |
+
t1 = tm.box_expected(t1, box_with_array)
|
| 877 |
+
t2 = date_range("20130101", periods=3).tz_localize("CET")
|
| 878 |
+
t2 = tm.box_expected(t2, box_with_array)
|
| 879 |
+
tnaive = date_range("20130101", periods=3)
|
| 880 |
+
|
| 881 |
+
result = t1 - t2
|
| 882 |
+
expected = TimedeltaIndex(
|
| 883 |
+
["0 days 06:00:00", "0 days 06:00:00", "0 days 06:00:00"]
|
| 884 |
+
)
|
| 885 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 886 |
+
tm.assert_equal(result, expected)
|
| 887 |
+
|
| 888 |
+
result = t2 - t1
|
| 889 |
+
expected = TimedeltaIndex(
|
| 890 |
+
["-1 days +18:00:00", "-1 days +18:00:00", "-1 days +18:00:00"]
|
| 891 |
+
)
|
| 892 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 893 |
+
tm.assert_equal(result, expected)
|
| 894 |
+
|
| 895 |
+
msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
|
| 896 |
+
with pytest.raises(TypeError, match=msg):
|
| 897 |
+
t1 - tnaive
|
| 898 |
+
|
| 899 |
+
with pytest.raises(TypeError, match=msg):
|
| 900 |
+
tnaive - t1
|
| 901 |
+
|
| 902 |
+
def test_dt64arr_add_sub_td64_nat(self, box_with_array, tz_naive_fixture):
|
| 903 |
+
# GH#23320 special handling for timedelta64("NaT")
|
| 904 |
+
tz = tz_naive_fixture
|
| 905 |
+
|
| 906 |
+
dti = date_range("1994-04-01", periods=9, tz=tz, freq="QS")
|
| 907 |
+
other = np.timedelta64("NaT")
|
| 908 |
+
expected = DatetimeIndex(["NaT"] * 9, tz=tz).as_unit("ns")
|
| 909 |
+
|
| 910 |
+
obj = tm.box_expected(dti, box_with_array)
|
| 911 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 912 |
+
|
| 913 |
+
result = obj + other
|
| 914 |
+
tm.assert_equal(result, expected)
|
| 915 |
+
result = other + obj
|
| 916 |
+
tm.assert_equal(result, expected)
|
| 917 |
+
result = obj - other
|
| 918 |
+
tm.assert_equal(result, expected)
|
| 919 |
+
msg = "cannot subtract"
|
| 920 |
+
with pytest.raises(TypeError, match=msg):
|
| 921 |
+
other - obj
|
| 922 |
+
|
| 923 |
+
def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array):
|
| 924 |
+
tz = tz_naive_fixture
|
| 925 |
+
dti = date_range("2016-01-01", periods=3, tz=tz)
|
| 926 |
+
tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"])
|
| 927 |
+
tdarr = tdi.values
|
| 928 |
+
|
| 929 |
+
expected = date_range("2015-12-31", "2016-01-02", periods=3, tz=tz)
|
| 930 |
+
|
| 931 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 932 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 933 |
+
|
| 934 |
+
result = dtarr + tdarr
|
| 935 |
+
tm.assert_equal(result, expected)
|
| 936 |
+
result = tdarr + dtarr
|
| 937 |
+
tm.assert_equal(result, expected)
|
| 938 |
+
|
| 939 |
+
expected = date_range("2016-01-02", "2016-01-04", periods=3, tz=tz)
|
| 940 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 941 |
+
|
| 942 |
+
result = dtarr - tdarr
|
| 943 |
+
tm.assert_equal(result, expected)
|
| 944 |
+
msg = "cannot subtract|(bad|unsupported) operand type for unary"
|
| 945 |
+
with pytest.raises(TypeError, match=msg):
|
| 946 |
+
tdarr - dtarr
|
| 947 |
+
|
| 948 |
+
# -----------------------------------------------------------------
|
| 949 |
+
# Subtraction of datetime-like scalars
|
| 950 |
+
|
| 951 |
+
@pytest.mark.parametrize(
|
| 952 |
+
"ts",
|
| 953 |
+
[
|
| 954 |
+
Timestamp("2013-01-01"),
|
| 955 |
+
Timestamp("2013-01-01").to_pydatetime(),
|
| 956 |
+
Timestamp("2013-01-01").to_datetime64(),
|
| 957 |
+
# GH#7996, GH#22163 ensure non-nano datetime64 is converted to nano
|
| 958 |
+
# for DataFrame operation
|
| 959 |
+
np.datetime64("2013-01-01", "D"),
|
| 960 |
+
],
|
| 961 |
+
)
|
| 962 |
+
def test_dt64arr_sub_dtscalar(self, box_with_array, ts):
|
| 963 |
+
# GH#8554, GH#22163 DataFrame op should _not_ return dt64 dtype
|
| 964 |
+
idx = date_range("2013-01-01", periods=3)._with_freq(None)
|
| 965 |
+
idx = tm.box_expected(idx, box_with_array)
|
| 966 |
+
|
| 967 |
+
expected = TimedeltaIndex(["0 Days", "1 Day", "2 Days"])
|
| 968 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 969 |
+
|
| 970 |
+
result = idx - ts
|
| 971 |
+
tm.assert_equal(result, expected)
|
| 972 |
+
|
| 973 |
+
result = ts - idx
|
| 974 |
+
tm.assert_equal(result, -expected)
|
| 975 |
+
tm.assert_equal(result, -expected)
|
| 976 |
+
|
| 977 |
+
def test_dt64arr_sub_timestamp_tzaware(self, box_with_array):
|
| 978 |
+
ser = date_range("2014-03-17", periods=2, freq="D", tz="US/Eastern")
|
| 979 |
+
ser = ser._with_freq(None)
|
| 980 |
+
ts = ser[0]
|
| 981 |
+
|
| 982 |
+
ser = tm.box_expected(ser, box_with_array)
|
| 983 |
+
|
| 984 |
+
delta_series = Series([np.timedelta64(0, "D"), np.timedelta64(1, "D")])
|
| 985 |
+
expected = tm.box_expected(delta_series, box_with_array)
|
| 986 |
+
|
| 987 |
+
tm.assert_equal(ser - ts, expected)
|
| 988 |
+
tm.assert_equal(ts - ser, -expected)
|
| 989 |
+
|
| 990 |
+
def test_dt64arr_sub_NaT(self, box_with_array, unit):
|
| 991 |
+
# GH#18808
|
| 992 |
+
dti = DatetimeIndex([NaT, Timestamp("19900315")]).as_unit(unit)
|
| 993 |
+
ser = tm.box_expected(dti, box_with_array)
|
| 994 |
+
|
| 995 |
+
result = ser - NaT
|
| 996 |
+
expected = Series([NaT, NaT], dtype=f"timedelta64[{unit}]")
|
| 997 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 998 |
+
tm.assert_equal(result, expected)
|
| 999 |
+
|
| 1000 |
+
dti_tz = dti.tz_localize("Asia/Tokyo")
|
| 1001 |
+
ser_tz = tm.box_expected(dti_tz, box_with_array)
|
| 1002 |
+
|
| 1003 |
+
result = ser_tz - NaT
|
| 1004 |
+
expected = Series([NaT, NaT], dtype=f"timedelta64[{unit}]")
|
| 1005 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1006 |
+
tm.assert_equal(result, expected)
|
| 1007 |
+
|
| 1008 |
+
# -------------------------------------------------------------
|
| 1009 |
+
# Subtraction of datetime-like array-like
|
| 1010 |
+
|
| 1011 |
+
def test_dt64arr_sub_dt64object_array(self, box_with_array, tz_naive_fixture):
|
| 1012 |
+
dti = date_range("2016-01-01", periods=3, tz=tz_naive_fixture)
|
| 1013 |
+
expected = dti - dti
|
| 1014 |
+
|
| 1015 |
+
obj = tm.box_expected(dti, box_with_array)
|
| 1016 |
+
expected = tm.box_expected(expected, box_with_array).astype(object)
|
| 1017 |
+
|
| 1018 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 1019 |
+
result = obj - obj.astype(object)
|
| 1020 |
+
tm.assert_equal(result, expected)
|
| 1021 |
+
|
| 1022 |
+
def test_dt64arr_naive_sub_dt64ndarray(self, box_with_array):
|
| 1023 |
+
dti = date_range("2016-01-01", periods=3, tz=None)
|
| 1024 |
+
dt64vals = dti.values
|
| 1025 |
+
|
| 1026 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 1027 |
+
|
| 1028 |
+
expected = dtarr - dtarr
|
| 1029 |
+
result = dtarr - dt64vals
|
| 1030 |
+
tm.assert_equal(result, expected)
|
| 1031 |
+
result = dt64vals - dtarr
|
| 1032 |
+
tm.assert_equal(result, expected)
|
| 1033 |
+
|
| 1034 |
+
def test_dt64arr_aware_sub_dt64ndarray_raises(
|
| 1035 |
+
self, tz_aware_fixture, box_with_array
|
| 1036 |
+
):
|
| 1037 |
+
tz = tz_aware_fixture
|
| 1038 |
+
dti = date_range("2016-01-01", periods=3, tz=tz)
|
| 1039 |
+
dt64vals = dti.values
|
| 1040 |
+
|
| 1041 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 1042 |
+
msg = "Cannot subtract tz-naive and tz-aware datetime"
|
| 1043 |
+
with pytest.raises(TypeError, match=msg):
|
| 1044 |
+
dtarr - dt64vals
|
| 1045 |
+
with pytest.raises(TypeError, match=msg):
|
| 1046 |
+
dt64vals - dtarr
|
| 1047 |
+
|
| 1048 |
+
# -------------------------------------------------------------
|
| 1049 |
+
# Addition of datetime-like others (invalid)
|
| 1050 |
+
|
| 1051 |
+
def test_dt64arr_add_dtlike_raises(self, tz_naive_fixture, box_with_array):
|
| 1052 |
+
# GH#22163 ensure DataFrame doesn't cast Timestamp to i8
|
| 1053 |
+
# GH#9631
|
| 1054 |
+
tz = tz_naive_fixture
|
| 1055 |
+
|
| 1056 |
+
dti = date_range("2016-01-01", periods=3, tz=tz)
|
| 1057 |
+
if tz is None:
|
| 1058 |
+
dti2 = dti.tz_localize("US/Eastern")
|
| 1059 |
+
else:
|
| 1060 |
+
dti2 = dti.tz_localize(None)
|
| 1061 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 1062 |
+
|
| 1063 |
+
assert_cannot_add(dtarr, dti.values)
|
| 1064 |
+
assert_cannot_add(dtarr, dti)
|
| 1065 |
+
assert_cannot_add(dtarr, dtarr)
|
| 1066 |
+
assert_cannot_add(dtarr, dti[0])
|
| 1067 |
+
assert_cannot_add(dtarr, dti[0].to_pydatetime())
|
| 1068 |
+
assert_cannot_add(dtarr, dti[0].to_datetime64())
|
| 1069 |
+
assert_cannot_add(dtarr, dti2[0])
|
| 1070 |
+
assert_cannot_add(dtarr, dti2[0].to_pydatetime())
|
| 1071 |
+
assert_cannot_add(dtarr, np.datetime64("2011-01-01", "D"))
|
| 1072 |
+
|
| 1073 |
+
# -------------------------------------------------------------
|
| 1074 |
+
# Other Invalid Addition/Subtraction
|
| 1075 |
+
|
| 1076 |
+
# Note: freq here includes both Tick and non-Tick offsets; this is
|
| 1077 |
+
# relevant because historically integer-addition was allowed if we had
|
| 1078 |
+
# a freq.
|
| 1079 |
+
@pytest.mark.parametrize("freq", ["h", "D", "W", "2ME", "MS", "QE", "B", None])
|
| 1080 |
+
@pytest.mark.parametrize("dtype", [None, "uint8"])
|
| 1081 |
+
def test_dt64arr_addsub_intlike(
|
| 1082 |
+
self, request, dtype, index_or_series_or_array, freq, tz_naive_fixture
|
| 1083 |
+
):
|
| 1084 |
+
# GH#19959, GH#19123, GH#19012
|
| 1085 |
+
# GH#55860 use index_or_series_or_array instead of box_with_array
|
| 1086 |
+
# bc DataFrame alignment makes it inapplicable
|
| 1087 |
+
tz = tz_naive_fixture
|
| 1088 |
+
|
| 1089 |
+
if freq is None:
|
| 1090 |
+
dti = DatetimeIndex(["NaT", "2017-04-05 06:07:08"], tz=tz)
|
| 1091 |
+
else:
|
| 1092 |
+
dti = date_range("2016-01-01", periods=2, freq=freq, tz=tz)
|
| 1093 |
+
|
| 1094 |
+
obj = index_or_series_or_array(dti)
|
| 1095 |
+
other = np.array([4, -1])
|
| 1096 |
+
if dtype is not None:
|
| 1097 |
+
other = other.astype(dtype)
|
| 1098 |
+
|
| 1099 |
+
msg = "|".join(
|
| 1100 |
+
[
|
| 1101 |
+
"Addition/subtraction of integers",
|
| 1102 |
+
"cannot subtract DatetimeArray from",
|
| 1103 |
+
# IntegerArray
|
| 1104 |
+
"can only perform ops with numeric values",
|
| 1105 |
+
"unsupported operand type.*Categorical",
|
| 1106 |
+
r"unsupported operand type\(s\) for -: 'int' and 'Timestamp'",
|
| 1107 |
+
]
|
| 1108 |
+
)
|
| 1109 |
+
assert_invalid_addsub_type(obj, 1, msg)
|
| 1110 |
+
assert_invalid_addsub_type(obj, np.int64(2), msg)
|
| 1111 |
+
assert_invalid_addsub_type(obj, np.array(3, dtype=np.int64), msg)
|
| 1112 |
+
assert_invalid_addsub_type(obj, other, msg)
|
| 1113 |
+
assert_invalid_addsub_type(obj, np.array(other), msg)
|
| 1114 |
+
assert_invalid_addsub_type(obj, pd.array(other), msg)
|
| 1115 |
+
assert_invalid_addsub_type(obj, pd.Categorical(other), msg)
|
| 1116 |
+
assert_invalid_addsub_type(obj, pd.Index(other), msg)
|
| 1117 |
+
assert_invalid_addsub_type(obj, Series(other), msg)
|
| 1118 |
+
|
| 1119 |
+
@pytest.mark.parametrize(
|
| 1120 |
+
"other",
|
| 1121 |
+
[
|
| 1122 |
+
3.14,
|
| 1123 |
+
np.array([2.0, 3.0]),
|
| 1124 |
+
# GH#13078 datetime +/- Period is invalid
|
| 1125 |
+
Period("2011-01-01", freq="D"),
|
| 1126 |
+
# https://github.com/pandas-dev/pandas/issues/10329
|
| 1127 |
+
time(1, 2, 3),
|
| 1128 |
+
],
|
| 1129 |
+
)
|
| 1130 |
+
@pytest.mark.parametrize("dti_freq", [None, "D"])
|
| 1131 |
+
def test_dt64arr_add_sub_invalid(self, dti_freq, other, box_with_array):
|
| 1132 |
+
dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
|
| 1133 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 1134 |
+
msg = "|".join(
|
| 1135 |
+
[
|
| 1136 |
+
"unsupported operand type",
|
| 1137 |
+
"cannot (add|subtract)",
|
| 1138 |
+
"cannot use operands with types",
|
| 1139 |
+
"ufunc '?(add|subtract)'? cannot use operands with types",
|
| 1140 |
+
"Concatenation operation is not implemented for NumPy arrays",
|
| 1141 |
+
]
|
| 1142 |
+
)
|
| 1143 |
+
assert_invalid_addsub_type(dtarr, other, msg)
|
| 1144 |
+
|
| 1145 |
+
@pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"])
|
| 1146 |
+
@pytest.mark.parametrize("dti_freq", [None, "D"])
|
| 1147 |
+
def test_dt64arr_add_sub_parr(
|
| 1148 |
+
self, dti_freq, pi_freq, box_with_array, box_with_array2
|
| 1149 |
+
):
|
| 1150 |
+
# GH#20049 subtracting PeriodIndex should raise TypeError
|
| 1151 |
+
dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
|
| 1152 |
+
pi = dti.to_period(pi_freq)
|
| 1153 |
+
|
| 1154 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 1155 |
+
parr = tm.box_expected(pi, box_with_array2)
|
| 1156 |
+
msg = "|".join(
|
| 1157 |
+
[
|
| 1158 |
+
"cannot (add|subtract)",
|
| 1159 |
+
"unsupported operand",
|
| 1160 |
+
"descriptor.*requires",
|
| 1161 |
+
"ufunc.*cannot use operands",
|
| 1162 |
+
]
|
| 1163 |
+
)
|
| 1164 |
+
assert_invalid_addsub_type(dtarr, parr, msg)
|
| 1165 |
+
|
| 1166 |
+
@pytest.mark.filterwarnings("ignore::pandas.errors.PerformanceWarning")
|
| 1167 |
+
def test_dt64arr_addsub_time_objects_raises(self, box_with_array, tz_naive_fixture):
|
| 1168 |
+
# https://github.com/pandas-dev/pandas/issues/10329
|
| 1169 |
+
|
| 1170 |
+
tz = tz_naive_fixture
|
| 1171 |
+
|
| 1172 |
+
obj1 = date_range("2012-01-01", periods=3, tz=tz)
|
| 1173 |
+
obj2 = [time(i, i, i) for i in range(3)]
|
| 1174 |
+
|
| 1175 |
+
obj1 = tm.box_expected(obj1, box_with_array)
|
| 1176 |
+
obj2 = tm.box_expected(obj2, box_with_array)
|
| 1177 |
+
|
| 1178 |
+
msg = "|".join(
|
| 1179 |
+
[
|
| 1180 |
+
"unsupported operand",
|
| 1181 |
+
"cannot subtract DatetimeArray from ndarray",
|
| 1182 |
+
]
|
| 1183 |
+
)
|
| 1184 |
+
# pandas.errors.PerformanceWarning: Non-vectorized DateOffset being
|
| 1185 |
+
# applied to Series or DatetimeIndex
|
| 1186 |
+
# we aren't testing that here, so ignore.
|
| 1187 |
+
assert_invalid_addsub_type(obj1, obj2, msg=msg)
|
| 1188 |
+
|
| 1189 |
+
# -------------------------------------------------------------
|
| 1190 |
+
# Other invalid operations
|
| 1191 |
+
|
| 1192 |
+
@pytest.mark.parametrize(
|
| 1193 |
+
"dt64_series",
|
| 1194 |
+
[
|
| 1195 |
+
Series([Timestamp("19900315"), Timestamp("19900315")]),
|
| 1196 |
+
Series([NaT, Timestamp("19900315")]),
|
| 1197 |
+
Series([NaT, NaT], dtype="datetime64[ns]"),
|
| 1198 |
+
],
|
| 1199 |
+
)
|
| 1200 |
+
@pytest.mark.parametrize("one", [1, 1.0, np.array(1)])
|
| 1201 |
+
def test_dt64_mul_div_numeric_invalid(self, one, dt64_series, box_with_array):
|
| 1202 |
+
obj = tm.box_expected(dt64_series, box_with_array)
|
| 1203 |
+
|
| 1204 |
+
msg = "cannot perform .* with this index type"
|
| 1205 |
+
|
| 1206 |
+
# multiplication
|
| 1207 |
+
with pytest.raises(TypeError, match=msg):
|
| 1208 |
+
obj * one
|
| 1209 |
+
with pytest.raises(TypeError, match=msg):
|
| 1210 |
+
one * obj
|
| 1211 |
+
|
| 1212 |
+
# division
|
| 1213 |
+
with pytest.raises(TypeError, match=msg):
|
| 1214 |
+
obj / one
|
| 1215 |
+
with pytest.raises(TypeError, match=msg):
|
| 1216 |
+
one / obj
|
| 1217 |
+
|
| 1218 |
+
|
| 1219 |
+
class TestDatetime64DateOffsetArithmetic:
|
| 1220 |
+
# -------------------------------------------------------------
|
| 1221 |
+
# Tick DateOffsets
|
| 1222 |
+
|
| 1223 |
+
# TODO: parametrize over timezone?
|
| 1224 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
| 1225 |
+
def test_dt64arr_series_add_tick_DateOffset(self, box_with_array, unit):
|
| 1226 |
+
# GH#4532
|
| 1227 |
+
# operate with pd.offsets
|
| 1228 |
+
ser = Series(
|
| 1229 |
+
[Timestamp("20130101 9:01"), Timestamp("20130101 9:02")]
|
| 1230 |
+
).dt.as_unit(unit)
|
| 1231 |
+
expected = Series(
|
| 1232 |
+
[Timestamp("20130101 9:01:05"), Timestamp("20130101 9:02:05")]
|
| 1233 |
+
).dt.as_unit(unit)
|
| 1234 |
+
|
| 1235 |
+
ser = tm.box_expected(ser, box_with_array)
|
| 1236 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1237 |
+
|
| 1238 |
+
result = ser + pd.offsets.Second(5)
|
| 1239 |
+
tm.assert_equal(result, expected)
|
| 1240 |
+
|
| 1241 |
+
result2 = pd.offsets.Second(5) + ser
|
| 1242 |
+
tm.assert_equal(result2, expected)
|
| 1243 |
+
|
| 1244 |
+
def test_dt64arr_series_sub_tick_DateOffset(self, box_with_array):
|
| 1245 |
+
# GH#4532
|
| 1246 |
+
# operate with pd.offsets
|
| 1247 |
+
ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
|
| 1248 |
+
expected = Series(
|
| 1249 |
+
[Timestamp("20130101 9:00:55"), Timestamp("20130101 9:01:55")]
|
| 1250 |
+
)
|
| 1251 |
+
|
| 1252 |
+
ser = tm.box_expected(ser, box_with_array)
|
| 1253 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1254 |
+
|
| 1255 |
+
result = ser - pd.offsets.Second(5)
|
| 1256 |
+
tm.assert_equal(result, expected)
|
| 1257 |
+
|
| 1258 |
+
result2 = -pd.offsets.Second(5) + ser
|
| 1259 |
+
tm.assert_equal(result2, expected)
|
| 1260 |
+
msg = "(bad|unsupported) operand type for unary"
|
| 1261 |
+
with pytest.raises(TypeError, match=msg):
|
| 1262 |
+
pd.offsets.Second(5) - ser
|
| 1263 |
+
|
| 1264 |
+
@pytest.mark.parametrize(
|
| 1265 |
+
"cls_name", ["Day", "Hour", "Minute", "Second", "Milli", "Micro", "Nano"]
|
| 1266 |
+
)
|
| 1267 |
+
def test_dt64arr_add_sub_tick_DateOffset_smoke(self, cls_name, box_with_array):
|
| 1268 |
+
# GH#4532
|
| 1269 |
+
# smoke tests for valid DateOffsets
|
| 1270 |
+
ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
|
| 1271 |
+
ser = tm.box_expected(ser, box_with_array)
|
| 1272 |
+
|
| 1273 |
+
offset_cls = getattr(pd.offsets, cls_name)
|
| 1274 |
+
ser + offset_cls(5)
|
| 1275 |
+
offset_cls(5) + ser
|
| 1276 |
+
ser - offset_cls(5)
|
| 1277 |
+
|
| 1278 |
+
def test_dti_add_tick_tzaware(self, tz_aware_fixture, box_with_array):
|
| 1279 |
+
# GH#21610, GH#22163 ensure DataFrame doesn't return object-dtype
|
| 1280 |
+
tz = tz_aware_fixture
|
| 1281 |
+
if tz == "US/Pacific":
|
| 1282 |
+
dates = date_range("2012-11-01", periods=3, tz=tz)
|
| 1283 |
+
offset = dates + pd.offsets.Hour(5)
|
| 1284 |
+
assert dates[0] + pd.offsets.Hour(5) == offset[0]
|
| 1285 |
+
|
| 1286 |
+
dates = date_range("2010-11-01 00:00", periods=3, tz=tz, freq="h")
|
| 1287 |
+
expected = DatetimeIndex(
|
| 1288 |
+
["2010-11-01 05:00", "2010-11-01 06:00", "2010-11-01 07:00"],
|
| 1289 |
+
freq="h",
|
| 1290 |
+
tz=tz,
|
| 1291 |
+
).as_unit("ns")
|
| 1292 |
+
|
| 1293 |
+
dates = tm.box_expected(dates, box_with_array)
|
| 1294 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1295 |
+
|
| 1296 |
+
for scalar in [pd.offsets.Hour(5), np.timedelta64(5, "h"), timedelta(hours=5)]:
|
| 1297 |
+
offset = dates + scalar
|
| 1298 |
+
tm.assert_equal(offset, expected)
|
| 1299 |
+
offset = scalar + dates
|
| 1300 |
+
tm.assert_equal(offset, expected)
|
| 1301 |
+
|
| 1302 |
+
roundtrip = offset - scalar
|
| 1303 |
+
tm.assert_equal(roundtrip, dates)
|
| 1304 |
+
|
| 1305 |
+
msg = "|".join(
|
| 1306 |
+
["bad operand type for unary -", "cannot subtract DatetimeArray"]
|
| 1307 |
+
)
|
| 1308 |
+
with pytest.raises(TypeError, match=msg):
|
| 1309 |
+
scalar - dates
|
| 1310 |
+
|
| 1311 |
+
# -------------------------------------------------------------
|
| 1312 |
+
# RelativeDelta DateOffsets
|
| 1313 |
+
|
| 1314 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
| 1315 |
+
def test_dt64arr_add_sub_relativedelta_offsets(self, box_with_array, unit):
|
| 1316 |
+
# GH#10699
|
| 1317 |
+
vec = DatetimeIndex(
|
| 1318 |
+
[
|
| 1319 |
+
Timestamp("2000-01-05 00:15:00"),
|
| 1320 |
+
Timestamp("2000-01-31 00:23:00"),
|
| 1321 |
+
Timestamp("2000-01-01"),
|
| 1322 |
+
Timestamp("2000-03-31"),
|
| 1323 |
+
Timestamp("2000-02-29"),
|
| 1324 |
+
Timestamp("2000-12-31"),
|
| 1325 |
+
Timestamp("2000-05-15"),
|
| 1326 |
+
Timestamp("2001-06-15"),
|
| 1327 |
+
]
|
| 1328 |
+
).as_unit(unit)
|
| 1329 |
+
vec = tm.box_expected(vec, box_with_array)
|
| 1330 |
+
vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec
|
| 1331 |
+
|
| 1332 |
+
# DateOffset relativedelta fastpath
|
| 1333 |
+
relative_kwargs = [
|
| 1334 |
+
("years", 2),
|
| 1335 |
+
("months", 5),
|
| 1336 |
+
("days", 3),
|
| 1337 |
+
("hours", 5),
|
| 1338 |
+
("minutes", 10),
|
| 1339 |
+
("seconds", 2),
|
| 1340 |
+
("microseconds", 5),
|
| 1341 |
+
]
|
| 1342 |
+
for i, (offset_unit, value) in enumerate(relative_kwargs):
|
| 1343 |
+
off = DateOffset(**{offset_unit: value})
|
| 1344 |
+
|
| 1345 |
+
exp_unit = unit
|
| 1346 |
+
if offset_unit == "microseconds" and unit != "ns":
|
| 1347 |
+
exp_unit = "us"
|
| 1348 |
+
|
| 1349 |
+
# TODO(GH#55564): as_unit will be unnecessary
|
| 1350 |
+
expected = DatetimeIndex([x + off for x in vec_items]).as_unit(exp_unit)
|
| 1351 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1352 |
+
tm.assert_equal(expected, vec + off)
|
| 1353 |
+
|
| 1354 |
+
expected = DatetimeIndex([x - off for x in vec_items]).as_unit(exp_unit)
|
| 1355 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1356 |
+
tm.assert_equal(expected, vec - off)
|
| 1357 |
+
|
| 1358 |
+
off = DateOffset(**dict(relative_kwargs[: i + 1]))
|
| 1359 |
+
|
| 1360 |
+
expected = DatetimeIndex([x + off for x in vec_items]).as_unit(exp_unit)
|
| 1361 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1362 |
+
tm.assert_equal(expected, vec + off)
|
| 1363 |
+
|
| 1364 |
+
expected = DatetimeIndex([x - off for x in vec_items]).as_unit(exp_unit)
|
| 1365 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1366 |
+
tm.assert_equal(expected, vec - off)
|
| 1367 |
+
msg = "(bad|unsupported) operand type for unary"
|
| 1368 |
+
with pytest.raises(TypeError, match=msg):
|
| 1369 |
+
off - vec
|
| 1370 |
+
|
| 1371 |
+
# -------------------------------------------------------------
|
| 1372 |
+
# Non-Tick, Non-RelativeDelta DateOffsets
|
| 1373 |
+
|
| 1374 |
+
# TODO: redundant with test_dt64arr_add_sub_DateOffset? that includes
|
| 1375 |
+
# tz-aware cases which this does not
|
| 1376 |
+
@pytest.mark.filterwarnings("ignore::pandas.errors.PerformanceWarning")
|
| 1377 |
+
@pytest.mark.parametrize(
|
| 1378 |
+
"cls_and_kwargs",
|
| 1379 |
+
[
|
| 1380 |
+
"YearBegin",
|
| 1381 |
+
("YearBegin", {"month": 5}),
|
| 1382 |
+
"YearEnd",
|
| 1383 |
+
("YearEnd", {"month": 5}),
|
| 1384 |
+
"MonthBegin",
|
| 1385 |
+
"MonthEnd",
|
| 1386 |
+
"SemiMonthEnd",
|
| 1387 |
+
"SemiMonthBegin",
|
| 1388 |
+
"Week",
|
| 1389 |
+
("Week", {"weekday": 3}),
|
| 1390 |
+
"Week",
|
| 1391 |
+
("Week", {"weekday": 6}),
|
| 1392 |
+
"BusinessDay",
|
| 1393 |
+
"BDay",
|
| 1394 |
+
"QuarterEnd",
|
| 1395 |
+
"QuarterBegin",
|
| 1396 |
+
"CustomBusinessDay",
|
| 1397 |
+
"CDay",
|
| 1398 |
+
"CBMonthEnd",
|
| 1399 |
+
"CBMonthBegin",
|
| 1400 |
+
"BMonthBegin",
|
| 1401 |
+
"BMonthEnd",
|
| 1402 |
+
"BusinessHour",
|
| 1403 |
+
"BYearBegin",
|
| 1404 |
+
"BYearEnd",
|
| 1405 |
+
"BQuarterBegin",
|
| 1406 |
+
("LastWeekOfMonth", {"weekday": 2}),
|
| 1407 |
+
(
|
| 1408 |
+
"FY5253Quarter",
|
| 1409 |
+
{
|
| 1410 |
+
"qtr_with_extra_week": 1,
|
| 1411 |
+
"startingMonth": 1,
|
| 1412 |
+
"weekday": 2,
|
| 1413 |
+
"variation": "nearest",
|
| 1414 |
+
},
|
| 1415 |
+
),
|
| 1416 |
+
("FY5253", {"weekday": 0, "startingMonth": 2, "variation": "nearest"}),
|
| 1417 |
+
("WeekOfMonth", {"weekday": 2, "week": 2}),
|
| 1418 |
+
"Easter",
|
| 1419 |
+
("DateOffset", {"day": 4}),
|
| 1420 |
+
("DateOffset", {"month": 5}),
|
| 1421 |
+
],
|
| 1422 |
+
)
|
| 1423 |
+
@pytest.mark.parametrize("normalize", [True, False])
|
| 1424 |
+
@pytest.mark.parametrize("n", [0, 5])
|
| 1425 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
| 1426 |
+
@pytest.mark.parametrize("tz", [None, "US/Central"])
|
| 1427 |
+
def test_dt64arr_add_sub_DateOffsets(
|
| 1428 |
+
self, box_with_array, n, normalize, cls_and_kwargs, unit, tz
|
| 1429 |
+
):
|
| 1430 |
+
# GH#10699
|
| 1431 |
+
# assert vectorized operation matches pointwise operations
|
| 1432 |
+
|
| 1433 |
+
if isinstance(cls_and_kwargs, tuple):
|
| 1434 |
+
# If cls_name param is a tuple, then 2nd entry is kwargs for
|
| 1435 |
+
# the offset constructor
|
| 1436 |
+
cls_name, kwargs = cls_and_kwargs
|
| 1437 |
+
else:
|
| 1438 |
+
cls_name = cls_and_kwargs
|
| 1439 |
+
kwargs = {}
|
| 1440 |
+
|
| 1441 |
+
if n == 0 and cls_name in [
|
| 1442 |
+
"WeekOfMonth",
|
| 1443 |
+
"LastWeekOfMonth",
|
| 1444 |
+
"FY5253Quarter",
|
| 1445 |
+
"FY5253",
|
| 1446 |
+
]:
|
| 1447 |
+
# passing n = 0 is invalid for these offset classes
|
| 1448 |
+
return
|
| 1449 |
+
|
| 1450 |
+
vec = (
|
| 1451 |
+
DatetimeIndex(
|
| 1452 |
+
[
|
| 1453 |
+
Timestamp("2000-01-05 00:15:00"),
|
| 1454 |
+
Timestamp("2000-01-31 00:23:00"),
|
| 1455 |
+
Timestamp("2000-01-01"),
|
| 1456 |
+
Timestamp("2000-03-31"),
|
| 1457 |
+
Timestamp("2000-02-29"),
|
| 1458 |
+
Timestamp("2000-12-31"),
|
| 1459 |
+
Timestamp("2000-05-15"),
|
| 1460 |
+
Timestamp("2001-06-15"),
|
| 1461 |
+
]
|
| 1462 |
+
)
|
| 1463 |
+
.as_unit(unit)
|
| 1464 |
+
.tz_localize(tz)
|
| 1465 |
+
)
|
| 1466 |
+
vec = tm.box_expected(vec, box_with_array)
|
| 1467 |
+
vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec
|
| 1468 |
+
|
| 1469 |
+
offset_cls = getattr(pd.offsets, cls_name)
|
| 1470 |
+
offset = offset_cls(n, normalize=normalize, **kwargs)
|
| 1471 |
+
|
| 1472 |
+
# TODO(GH#55564): as_unit will be unnecessary
|
| 1473 |
+
expected = DatetimeIndex([x + offset for x in vec_items]).as_unit(unit)
|
| 1474 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1475 |
+
tm.assert_equal(expected, vec + offset)
|
| 1476 |
+
tm.assert_equal(expected, offset + vec)
|
| 1477 |
+
|
| 1478 |
+
expected = DatetimeIndex([x - offset for x in vec_items]).as_unit(unit)
|
| 1479 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1480 |
+
tm.assert_equal(expected, vec - offset)
|
| 1481 |
+
|
| 1482 |
+
expected = DatetimeIndex([offset + x for x in vec_items]).as_unit(unit)
|
| 1483 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1484 |
+
tm.assert_equal(expected, offset + vec)
|
| 1485 |
+
msg = "(bad|unsupported) operand type for unary"
|
| 1486 |
+
with pytest.raises(TypeError, match=msg):
|
| 1487 |
+
offset - vec
|
| 1488 |
+
|
| 1489 |
+
@pytest.mark.parametrize(
|
| 1490 |
+
"other",
|
| 1491 |
+
[
|
| 1492 |
+
np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)]),
|
| 1493 |
+
np.array([pd.offsets.DateOffset(years=1), pd.offsets.MonthEnd()]),
|
| 1494 |
+
np.array( # matching offsets
|
| 1495 |
+
[pd.offsets.DateOffset(years=1), pd.offsets.DateOffset(years=1)]
|
| 1496 |
+
),
|
| 1497 |
+
],
|
| 1498 |
+
)
|
| 1499 |
+
@pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
|
| 1500 |
+
def test_dt64arr_add_sub_offset_array(
|
| 1501 |
+
self, tz_naive_fixture, box_with_array, op, other
|
| 1502 |
+
):
|
| 1503 |
+
# GH#18849
|
| 1504 |
+
# GH#10699 array of offsets
|
| 1505 |
+
|
| 1506 |
+
tz = tz_naive_fixture
|
| 1507 |
+
dti = date_range("2017-01-01", periods=2, tz=tz)
|
| 1508 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 1509 |
+
|
| 1510 |
+
expected = DatetimeIndex([op(dti[n], other[n]) for n in range(len(dti))])
|
| 1511 |
+
expected = tm.box_expected(expected, box_with_array).astype(object)
|
| 1512 |
+
|
| 1513 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 1514 |
+
res = op(dtarr, other)
|
| 1515 |
+
tm.assert_equal(res, expected)
|
| 1516 |
+
|
| 1517 |
+
# Same thing but boxing other
|
| 1518 |
+
other = tm.box_expected(other, box_with_array)
|
| 1519 |
+
if box_with_array is pd.array and op is roperator.radd:
|
| 1520 |
+
# We expect a NumpyExtensionArray, not ndarray[object] here
|
| 1521 |
+
expected = pd.array(expected, dtype=object)
|
| 1522 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 1523 |
+
res = op(dtarr, other)
|
| 1524 |
+
tm.assert_equal(res, expected)
|
| 1525 |
+
|
| 1526 |
+
@pytest.mark.parametrize(
|
| 1527 |
+
"op, offset, exp, exp_freq",
|
| 1528 |
+
[
|
| 1529 |
+
(
|
| 1530 |
+
"__add__",
|
| 1531 |
+
DateOffset(months=3, days=10),
|
| 1532 |
+
[
|
| 1533 |
+
Timestamp("2014-04-11"),
|
| 1534 |
+
Timestamp("2015-04-11"),
|
| 1535 |
+
Timestamp("2016-04-11"),
|
| 1536 |
+
Timestamp("2017-04-11"),
|
| 1537 |
+
],
|
| 1538 |
+
None,
|
| 1539 |
+
),
|
| 1540 |
+
(
|
| 1541 |
+
"__add__",
|
| 1542 |
+
DateOffset(months=3),
|
| 1543 |
+
[
|
| 1544 |
+
Timestamp("2014-04-01"),
|
| 1545 |
+
Timestamp("2015-04-01"),
|
| 1546 |
+
Timestamp("2016-04-01"),
|
| 1547 |
+
Timestamp("2017-04-01"),
|
| 1548 |
+
],
|
| 1549 |
+
"YS-APR",
|
| 1550 |
+
),
|
| 1551 |
+
(
|
| 1552 |
+
"__sub__",
|
| 1553 |
+
DateOffset(months=3, days=10),
|
| 1554 |
+
[
|
| 1555 |
+
Timestamp("2013-09-21"),
|
| 1556 |
+
Timestamp("2014-09-21"),
|
| 1557 |
+
Timestamp("2015-09-21"),
|
| 1558 |
+
Timestamp("2016-09-21"),
|
| 1559 |
+
],
|
| 1560 |
+
None,
|
| 1561 |
+
),
|
| 1562 |
+
(
|
| 1563 |
+
"__sub__",
|
| 1564 |
+
DateOffset(months=3),
|
| 1565 |
+
[
|
| 1566 |
+
Timestamp("2013-10-01"),
|
| 1567 |
+
Timestamp("2014-10-01"),
|
| 1568 |
+
Timestamp("2015-10-01"),
|
| 1569 |
+
Timestamp("2016-10-01"),
|
| 1570 |
+
],
|
| 1571 |
+
"YS-OCT",
|
| 1572 |
+
),
|
| 1573 |
+
],
|
| 1574 |
+
)
|
| 1575 |
+
def test_dti_add_sub_nonzero_mth_offset(
|
| 1576 |
+
self, op, offset, exp, exp_freq, tz_aware_fixture, box_with_array
|
| 1577 |
+
):
|
| 1578 |
+
# GH 26258
|
| 1579 |
+
tz = tz_aware_fixture
|
| 1580 |
+
date = date_range(start="01 Jan 2014", end="01 Jan 2017", freq="YS", tz=tz)
|
| 1581 |
+
date = tm.box_expected(date, box_with_array, False)
|
| 1582 |
+
mth = getattr(date, op)
|
| 1583 |
+
result = mth(offset)
|
| 1584 |
+
|
| 1585 |
+
expected = DatetimeIndex(exp, tz=tz).as_unit("ns")
|
| 1586 |
+
expected = tm.box_expected(expected, box_with_array, False)
|
| 1587 |
+
tm.assert_equal(result, expected)
|
| 1588 |
+
|
| 1589 |
+
def test_dt64arr_series_add_DateOffset_with_milli(self):
|
| 1590 |
+
# GH 57529
|
| 1591 |
+
dti = DatetimeIndex(
|
| 1592 |
+
[
|
| 1593 |
+
"2000-01-01 00:00:00.012345678",
|
| 1594 |
+
"2000-01-31 00:00:00.012345678",
|
| 1595 |
+
"2000-02-29 00:00:00.012345678",
|
| 1596 |
+
],
|
| 1597 |
+
dtype="datetime64[ns]",
|
| 1598 |
+
)
|
| 1599 |
+
result = dti + DateOffset(milliseconds=4)
|
| 1600 |
+
expected = DatetimeIndex(
|
| 1601 |
+
[
|
| 1602 |
+
"2000-01-01 00:00:00.016345678",
|
| 1603 |
+
"2000-01-31 00:00:00.016345678",
|
| 1604 |
+
"2000-02-29 00:00:00.016345678",
|
| 1605 |
+
],
|
| 1606 |
+
dtype="datetime64[ns]",
|
| 1607 |
+
)
|
| 1608 |
+
tm.assert_index_equal(result, expected)
|
| 1609 |
+
|
| 1610 |
+
result = dti + DateOffset(days=1, milliseconds=4)
|
| 1611 |
+
expected = DatetimeIndex(
|
| 1612 |
+
[
|
| 1613 |
+
"2000-01-02 00:00:00.016345678",
|
| 1614 |
+
"2000-02-01 00:00:00.016345678",
|
| 1615 |
+
"2000-03-01 00:00:00.016345678",
|
| 1616 |
+
],
|
| 1617 |
+
dtype="datetime64[ns]",
|
| 1618 |
+
)
|
| 1619 |
+
tm.assert_index_equal(result, expected)
|
| 1620 |
+
|
| 1621 |
+
|
| 1622 |
+
class TestDatetime64OverflowHandling:
|
| 1623 |
+
# TODO: box + de-duplicate
|
| 1624 |
+
|
| 1625 |
+
def test_dt64_overflow_masking(self, box_with_array):
|
| 1626 |
+
# GH#25317
|
| 1627 |
+
left = Series([Timestamp("1969-12-31")], dtype="M8[ns]")
|
| 1628 |
+
right = Series([NaT])
|
| 1629 |
+
|
| 1630 |
+
left = tm.box_expected(left, box_with_array)
|
| 1631 |
+
right = tm.box_expected(right, box_with_array)
|
| 1632 |
+
|
| 1633 |
+
expected = TimedeltaIndex([NaT], dtype="m8[ns]")
|
| 1634 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1635 |
+
|
| 1636 |
+
result = left - right
|
| 1637 |
+
tm.assert_equal(result, expected)
|
| 1638 |
+
|
| 1639 |
+
def test_dt64_series_arith_overflow(self):
|
| 1640 |
+
# GH#12534, fixed by GH#19024
|
| 1641 |
+
dt = Timestamp("1700-01-31")
|
| 1642 |
+
td = Timedelta("20000 Days")
|
| 1643 |
+
dti = date_range("1949-09-30", freq="100YE", periods=4)
|
| 1644 |
+
ser = Series(dti)
|
| 1645 |
+
msg = "Overflow in int64 addition"
|
| 1646 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1647 |
+
ser - dt
|
| 1648 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1649 |
+
dt - ser
|
| 1650 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1651 |
+
ser + td
|
| 1652 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1653 |
+
td + ser
|
| 1654 |
+
|
| 1655 |
+
ser.iloc[-1] = NaT
|
| 1656 |
+
expected = Series(
|
| 1657 |
+
["2004-10-03", "2104-10-04", "2204-10-04", "NaT"], dtype="datetime64[ns]"
|
| 1658 |
+
)
|
| 1659 |
+
res = ser + td
|
| 1660 |
+
tm.assert_series_equal(res, expected)
|
| 1661 |
+
res = td + ser
|
| 1662 |
+
tm.assert_series_equal(res, expected)
|
| 1663 |
+
|
| 1664 |
+
ser.iloc[1:] = NaT
|
| 1665 |
+
expected = Series(["91279 Days", "NaT", "NaT", "NaT"], dtype="timedelta64[ns]")
|
| 1666 |
+
res = ser - dt
|
| 1667 |
+
tm.assert_series_equal(res, expected)
|
| 1668 |
+
res = dt - ser
|
| 1669 |
+
tm.assert_series_equal(res, -expected)
|
| 1670 |
+
|
| 1671 |
+
def test_datetimeindex_sub_timestamp_overflow(self):
|
| 1672 |
+
dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max]).as_unit("ns")
|
| 1673 |
+
dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min]).as_unit("ns")
|
| 1674 |
+
|
| 1675 |
+
tsneg = Timestamp("1950-01-01").as_unit("ns")
|
| 1676 |
+
ts_neg_variants = [
|
| 1677 |
+
tsneg,
|
| 1678 |
+
tsneg.to_pydatetime(),
|
| 1679 |
+
tsneg.to_datetime64().astype("datetime64[ns]"),
|
| 1680 |
+
tsneg.to_datetime64().astype("datetime64[D]"),
|
| 1681 |
+
]
|
| 1682 |
+
|
| 1683 |
+
tspos = Timestamp("1980-01-01").as_unit("ns")
|
| 1684 |
+
ts_pos_variants = [
|
| 1685 |
+
tspos,
|
| 1686 |
+
tspos.to_pydatetime(),
|
| 1687 |
+
tspos.to_datetime64().astype("datetime64[ns]"),
|
| 1688 |
+
tspos.to_datetime64().astype("datetime64[D]"),
|
| 1689 |
+
]
|
| 1690 |
+
msg = "Overflow in int64 addition"
|
| 1691 |
+
for variant in ts_neg_variants:
|
| 1692 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1693 |
+
dtimax - variant
|
| 1694 |
+
|
| 1695 |
+
expected = Timestamp.max._value - tspos._value
|
| 1696 |
+
for variant in ts_pos_variants:
|
| 1697 |
+
res = dtimax - variant
|
| 1698 |
+
assert res[1]._value == expected
|
| 1699 |
+
|
| 1700 |
+
expected = Timestamp.min._value - tsneg._value
|
| 1701 |
+
for variant in ts_neg_variants:
|
| 1702 |
+
res = dtimin - variant
|
| 1703 |
+
assert res[1]._value == expected
|
| 1704 |
+
|
| 1705 |
+
for variant in ts_pos_variants:
|
| 1706 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1707 |
+
dtimin - variant
|
| 1708 |
+
|
| 1709 |
+
def test_datetimeindex_sub_datetimeindex_overflow(self):
|
| 1710 |
+
# GH#22492, GH#22508
|
| 1711 |
+
dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max]).as_unit("ns")
|
| 1712 |
+
dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min]).as_unit("ns")
|
| 1713 |
+
|
| 1714 |
+
ts_neg = pd.to_datetime(["1950-01-01", "1950-01-01"]).as_unit("ns")
|
| 1715 |
+
ts_pos = pd.to_datetime(["1980-01-01", "1980-01-01"]).as_unit("ns")
|
| 1716 |
+
|
| 1717 |
+
# General tests
|
| 1718 |
+
expected = Timestamp.max._value - ts_pos[1]._value
|
| 1719 |
+
result = dtimax - ts_pos
|
| 1720 |
+
assert result[1]._value == expected
|
| 1721 |
+
|
| 1722 |
+
expected = Timestamp.min._value - ts_neg[1]._value
|
| 1723 |
+
result = dtimin - ts_neg
|
| 1724 |
+
assert result[1]._value == expected
|
| 1725 |
+
msg = "Overflow in int64 addition"
|
| 1726 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1727 |
+
dtimax - ts_neg
|
| 1728 |
+
|
| 1729 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1730 |
+
dtimin - ts_pos
|
| 1731 |
+
|
| 1732 |
+
# Edge cases
|
| 1733 |
+
tmin = pd.to_datetime([Timestamp.min])
|
| 1734 |
+
t1 = tmin + Timedelta.max + Timedelta("1us")
|
| 1735 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1736 |
+
t1 - tmin
|
| 1737 |
+
|
| 1738 |
+
tmax = pd.to_datetime([Timestamp.max])
|
| 1739 |
+
t2 = tmax + Timedelta.min - Timedelta("1us")
|
| 1740 |
+
with pytest.raises(OverflowError, match=msg):
|
| 1741 |
+
tmax - t2
|
| 1742 |
+
|
| 1743 |
+
|
| 1744 |
+
class TestTimestampSeriesArithmetic:
|
| 1745 |
+
def test_empty_series_add_sub(self, box_with_array):
|
| 1746 |
+
# GH#13844
|
| 1747 |
+
a = Series(dtype="M8[ns]")
|
| 1748 |
+
b = Series(dtype="m8[ns]")
|
| 1749 |
+
a = box_with_array(a)
|
| 1750 |
+
b = box_with_array(b)
|
| 1751 |
+
tm.assert_equal(a, a + b)
|
| 1752 |
+
tm.assert_equal(a, a - b)
|
| 1753 |
+
tm.assert_equal(a, b + a)
|
| 1754 |
+
msg = "cannot subtract"
|
| 1755 |
+
with pytest.raises(TypeError, match=msg):
|
| 1756 |
+
b - a
|
| 1757 |
+
|
| 1758 |
+
def test_operators_datetimelike(self):
|
| 1759 |
+
# ## timedelta64 ###
|
| 1760 |
+
td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
|
| 1761 |
+
td1.iloc[2] = np.nan
|
| 1762 |
+
|
| 1763 |
+
# ## datetime64 ###
|
| 1764 |
+
dt1 = Series(
|
| 1765 |
+
[
|
| 1766 |
+
Timestamp("20111230"),
|
| 1767 |
+
Timestamp("20120101"),
|
| 1768 |
+
Timestamp("20120103"),
|
| 1769 |
+
]
|
| 1770 |
+
)
|
| 1771 |
+
dt1.iloc[2] = np.nan
|
| 1772 |
+
dt2 = Series(
|
| 1773 |
+
[
|
| 1774 |
+
Timestamp("20111231"),
|
| 1775 |
+
Timestamp("20120102"),
|
| 1776 |
+
Timestamp("20120104"),
|
| 1777 |
+
]
|
| 1778 |
+
)
|
| 1779 |
+
dt1 - dt2
|
| 1780 |
+
dt2 - dt1
|
| 1781 |
+
|
| 1782 |
+
# datetime64 with timetimedelta
|
| 1783 |
+
dt1 + td1
|
| 1784 |
+
td1 + dt1
|
| 1785 |
+
dt1 - td1
|
| 1786 |
+
|
| 1787 |
+
# timetimedelta with datetime64
|
| 1788 |
+
td1 + dt1
|
| 1789 |
+
dt1 + td1
|
| 1790 |
+
|
| 1791 |
+
def test_dt64ser_sub_datetime_dtype(self, unit):
|
| 1792 |
+
ts = Timestamp(datetime(1993, 1, 7, 13, 30, 00))
|
| 1793 |
+
dt = datetime(1993, 6, 22, 13, 30)
|
| 1794 |
+
ser = Series([ts], dtype=f"M8[{unit}]")
|
| 1795 |
+
result = ser - dt
|
| 1796 |
+
|
| 1797 |
+
# the expected unit is the max of `unit` and the unit imputed to `dt`,
|
| 1798 |
+
# which is "us"
|
| 1799 |
+
exp_unit = tm.get_finest_unit(unit, "us")
|
| 1800 |
+
assert result.dtype == f"timedelta64[{exp_unit}]"
|
| 1801 |
+
|
| 1802 |
+
# -------------------------------------------------------------
|
| 1803 |
+
# TODO: This next block of tests came from tests.series.test_operators,
|
| 1804 |
+
# needs to be de-duplicated and parametrized over `box` classes
|
| 1805 |
+
|
| 1806 |
+
@pytest.mark.parametrize(
|
| 1807 |
+
"left, right, op_fail",
|
| 1808 |
+
[
|
| 1809 |
+
[
|
| 1810 |
+
[Timestamp("20111230"), Timestamp("20120101"), NaT],
|
| 1811 |
+
[Timestamp("20111231"), Timestamp("20120102"), Timestamp("20120104")],
|
| 1812 |
+
["__sub__", "__rsub__"],
|
| 1813 |
+
],
|
| 1814 |
+
[
|
| 1815 |
+
[Timestamp("20111230"), Timestamp("20120101"), NaT],
|
| 1816 |
+
[timedelta(minutes=5, seconds=3), timedelta(minutes=5, seconds=3), NaT],
|
| 1817 |
+
["__add__", "__radd__", "__sub__"],
|
| 1818 |
+
],
|
| 1819 |
+
[
|
| 1820 |
+
[
|
| 1821 |
+
Timestamp("20111230", tz="US/Eastern"),
|
| 1822 |
+
Timestamp("20111230", tz="US/Eastern"),
|
| 1823 |
+
NaT,
|
| 1824 |
+
],
|
| 1825 |
+
[timedelta(minutes=5, seconds=3), NaT, timedelta(minutes=5, seconds=3)],
|
| 1826 |
+
["__add__", "__radd__", "__sub__"],
|
| 1827 |
+
],
|
| 1828 |
+
],
|
| 1829 |
+
)
|
| 1830 |
+
def test_operators_datetimelike_invalid(
|
| 1831 |
+
self, left, right, op_fail, all_arithmetic_operators
|
| 1832 |
+
):
|
| 1833 |
+
# these are all TypeError ops
|
| 1834 |
+
op_str = all_arithmetic_operators
|
| 1835 |
+
arg1 = Series(left)
|
| 1836 |
+
arg2 = Series(right)
|
| 1837 |
+
# check that we are getting a TypeError
|
| 1838 |
+
# with 'operate' (from core/ops.py) for the ops that are not
|
| 1839 |
+
# defined
|
| 1840 |
+
op = getattr(arg1, op_str, None)
|
| 1841 |
+
# Previously, _validate_for_numeric_binop in core/indexes/base.py
|
| 1842 |
+
# did this for us.
|
| 1843 |
+
if op_str not in op_fail:
|
| 1844 |
+
with pytest.raises(
|
| 1845 |
+
TypeError, match="operate|[cC]annot|unsupported operand"
|
| 1846 |
+
):
|
| 1847 |
+
op(arg2)
|
| 1848 |
+
else:
|
| 1849 |
+
# Smoke test
|
| 1850 |
+
op(arg2)
|
| 1851 |
+
|
| 1852 |
+
def test_sub_single_tz(self, unit):
|
| 1853 |
+
# GH#12290
|
| 1854 |
+
s1 = Series([Timestamp("2016-02-10", tz="America/Sao_Paulo")]).dt.as_unit(unit)
|
| 1855 |
+
s2 = Series([Timestamp("2016-02-08", tz="America/Sao_Paulo")]).dt.as_unit(unit)
|
| 1856 |
+
result = s1 - s2
|
| 1857 |
+
expected = Series([Timedelta("2days")]).dt.as_unit(unit)
|
| 1858 |
+
tm.assert_series_equal(result, expected)
|
| 1859 |
+
result = s2 - s1
|
| 1860 |
+
expected = Series([Timedelta("-2days")]).dt.as_unit(unit)
|
| 1861 |
+
tm.assert_series_equal(result, expected)
|
| 1862 |
+
|
| 1863 |
+
def test_dt64tz_series_sub_dtitz(self):
|
| 1864 |
+
# GH#19071 subtracting tzaware DatetimeIndex from tzaware Series
|
| 1865 |
+
# (with same tz) raises, fixed by #19024
|
| 1866 |
+
dti = date_range("1999-09-30", periods=10, tz="US/Pacific")
|
| 1867 |
+
ser = Series(dti)
|
| 1868 |
+
expected = Series(TimedeltaIndex(["0days"] * 10))
|
| 1869 |
+
|
| 1870 |
+
res = dti - ser
|
| 1871 |
+
tm.assert_series_equal(res, expected)
|
| 1872 |
+
res = ser - dti
|
| 1873 |
+
tm.assert_series_equal(res, expected)
|
| 1874 |
+
|
| 1875 |
+
def test_sub_datetime_compat(self, unit):
|
| 1876 |
+
# see GH#14088
|
| 1877 |
+
ser = Series([datetime(2016, 8, 23, 12, tzinfo=pytz.utc), NaT]).dt.as_unit(unit)
|
| 1878 |
+
dt = datetime(2016, 8, 22, 12, tzinfo=pytz.utc)
|
| 1879 |
+
# The datetime object has "us" so we upcast lower units
|
| 1880 |
+
exp_unit = tm.get_finest_unit(unit, "us")
|
| 1881 |
+
exp = Series([Timedelta("1 days"), NaT]).dt.as_unit(exp_unit)
|
| 1882 |
+
result = ser - dt
|
| 1883 |
+
tm.assert_series_equal(result, exp)
|
| 1884 |
+
result2 = ser - Timestamp(dt)
|
| 1885 |
+
tm.assert_series_equal(result2, exp)
|
| 1886 |
+
|
| 1887 |
+
def test_dt64_series_add_mixed_tick_DateOffset(self):
|
| 1888 |
+
# GH#4532
|
| 1889 |
+
# operate with pd.offsets
|
| 1890 |
+
s = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
|
| 1891 |
+
|
| 1892 |
+
result = s + pd.offsets.Milli(5)
|
| 1893 |
+
result2 = pd.offsets.Milli(5) + s
|
| 1894 |
+
expected = Series(
|
| 1895 |
+
[Timestamp("20130101 9:01:00.005"), Timestamp("20130101 9:02:00.005")]
|
| 1896 |
+
)
|
| 1897 |
+
tm.assert_series_equal(result, expected)
|
| 1898 |
+
tm.assert_series_equal(result2, expected)
|
| 1899 |
+
|
| 1900 |
+
result = s + pd.offsets.Minute(5) + pd.offsets.Milli(5)
|
| 1901 |
+
expected = Series(
|
| 1902 |
+
[Timestamp("20130101 9:06:00.005"), Timestamp("20130101 9:07:00.005")]
|
| 1903 |
+
)
|
| 1904 |
+
tm.assert_series_equal(result, expected)
|
| 1905 |
+
|
| 1906 |
+
def test_datetime64_ops_nat(self, unit):
|
| 1907 |
+
# GH#11349
|
| 1908 |
+
datetime_series = Series([NaT, Timestamp("19900315")]).dt.as_unit(unit)
|
| 1909 |
+
nat_series_dtype_timestamp = Series([NaT, NaT], dtype=f"datetime64[{unit}]")
|
| 1910 |
+
single_nat_dtype_datetime = Series([NaT], dtype=f"datetime64[{unit}]")
|
| 1911 |
+
|
| 1912 |
+
# subtraction
|
| 1913 |
+
tm.assert_series_equal(-NaT + datetime_series, nat_series_dtype_timestamp)
|
| 1914 |
+
msg = "bad operand type for unary -: 'DatetimeArray'"
|
| 1915 |
+
with pytest.raises(TypeError, match=msg):
|
| 1916 |
+
-single_nat_dtype_datetime + datetime_series
|
| 1917 |
+
|
| 1918 |
+
tm.assert_series_equal(
|
| 1919 |
+
-NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
|
| 1920 |
+
)
|
| 1921 |
+
with pytest.raises(TypeError, match=msg):
|
| 1922 |
+
-single_nat_dtype_datetime + nat_series_dtype_timestamp
|
| 1923 |
+
|
| 1924 |
+
# addition
|
| 1925 |
+
tm.assert_series_equal(
|
| 1926 |
+
nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
|
| 1927 |
+
)
|
| 1928 |
+
tm.assert_series_equal(
|
| 1929 |
+
NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
|
| 1930 |
+
)
|
| 1931 |
+
|
| 1932 |
+
tm.assert_series_equal(
|
| 1933 |
+
nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
|
| 1934 |
+
)
|
| 1935 |
+
tm.assert_series_equal(
|
| 1936 |
+
NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
|
| 1937 |
+
)
|
| 1938 |
+
|
| 1939 |
+
# -------------------------------------------------------------
|
| 1940 |
+
# Timezone-Centric Tests
|
| 1941 |
+
|
| 1942 |
+
def test_operators_datetimelike_with_timezones(self):
|
| 1943 |
+
tz = "US/Eastern"
|
| 1944 |
+
dt1 = Series(date_range("2000-01-01 09:00:00", periods=5, tz=tz), name="foo")
|
| 1945 |
+
dt2 = dt1.copy()
|
| 1946 |
+
dt2.iloc[2] = np.nan
|
| 1947 |
+
|
| 1948 |
+
td1 = Series(pd.timedelta_range("1 days 1 min", periods=5, freq="h"))
|
| 1949 |
+
td2 = td1.copy()
|
| 1950 |
+
td2.iloc[1] = np.nan
|
| 1951 |
+
assert td2._values.freq is None
|
| 1952 |
+
|
| 1953 |
+
result = dt1 + td1[0]
|
| 1954 |
+
exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
|
| 1955 |
+
tm.assert_series_equal(result, exp)
|
| 1956 |
+
|
| 1957 |
+
result = dt2 + td2[0]
|
| 1958 |
+
exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
|
| 1959 |
+
tm.assert_series_equal(result, exp)
|
| 1960 |
+
|
| 1961 |
+
# odd numpy behavior with scalar timedeltas
|
| 1962 |
+
result = td1[0] + dt1
|
| 1963 |
+
exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
|
| 1964 |
+
tm.assert_series_equal(result, exp)
|
| 1965 |
+
|
| 1966 |
+
result = td2[0] + dt2
|
| 1967 |
+
exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
|
| 1968 |
+
tm.assert_series_equal(result, exp)
|
| 1969 |
+
|
| 1970 |
+
result = dt1 - td1[0]
|
| 1971 |
+
exp = (dt1.dt.tz_localize(None) - td1[0]).dt.tz_localize(tz)
|
| 1972 |
+
tm.assert_series_equal(result, exp)
|
| 1973 |
+
msg = "(bad|unsupported) operand type for unary"
|
| 1974 |
+
with pytest.raises(TypeError, match=msg):
|
| 1975 |
+
td1[0] - dt1
|
| 1976 |
+
|
| 1977 |
+
result = dt2 - td2[0]
|
| 1978 |
+
exp = (dt2.dt.tz_localize(None) - td2[0]).dt.tz_localize(tz)
|
| 1979 |
+
tm.assert_series_equal(result, exp)
|
| 1980 |
+
with pytest.raises(TypeError, match=msg):
|
| 1981 |
+
td2[0] - dt2
|
| 1982 |
+
|
| 1983 |
+
result = dt1 + td1
|
| 1984 |
+
exp = (dt1.dt.tz_localize(None) + td1).dt.tz_localize(tz)
|
| 1985 |
+
tm.assert_series_equal(result, exp)
|
| 1986 |
+
|
| 1987 |
+
result = dt2 + td2
|
| 1988 |
+
exp = (dt2.dt.tz_localize(None) + td2).dt.tz_localize(tz)
|
| 1989 |
+
tm.assert_series_equal(result, exp)
|
| 1990 |
+
|
| 1991 |
+
result = dt1 - td1
|
| 1992 |
+
exp = (dt1.dt.tz_localize(None) - td1).dt.tz_localize(tz)
|
| 1993 |
+
tm.assert_series_equal(result, exp)
|
| 1994 |
+
|
| 1995 |
+
result = dt2 - td2
|
| 1996 |
+
exp = (dt2.dt.tz_localize(None) - td2).dt.tz_localize(tz)
|
| 1997 |
+
tm.assert_series_equal(result, exp)
|
| 1998 |
+
msg = "cannot (add|subtract)"
|
| 1999 |
+
with pytest.raises(TypeError, match=msg):
|
| 2000 |
+
td1 - dt1
|
| 2001 |
+
with pytest.raises(TypeError, match=msg):
|
| 2002 |
+
td2 - dt2
|
| 2003 |
+
|
| 2004 |
+
|
| 2005 |
+
class TestDatetimeIndexArithmetic:
|
| 2006 |
+
# -------------------------------------------------------------
|
| 2007 |
+
# Binary operations DatetimeIndex and TimedeltaIndex/array
|
| 2008 |
+
|
| 2009 |
+
def test_dti_add_tdi(self, tz_naive_fixture):
|
| 2010 |
+
# GH#17558
|
| 2011 |
+
tz = tz_naive_fixture
|
| 2012 |
+
dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
|
| 2013 |
+
tdi = pd.timedelta_range("0 days", periods=10)
|
| 2014 |
+
expected = date_range("2017-01-01", periods=10, tz=tz)
|
| 2015 |
+
expected = expected._with_freq(None)
|
| 2016 |
+
|
| 2017 |
+
# add with TimedeltaIndex
|
| 2018 |
+
result = dti + tdi
|
| 2019 |
+
tm.assert_index_equal(result, expected)
|
| 2020 |
+
|
| 2021 |
+
result = tdi + dti
|
| 2022 |
+
tm.assert_index_equal(result, expected)
|
| 2023 |
+
|
| 2024 |
+
# add with timedelta64 array
|
| 2025 |
+
result = dti + tdi.values
|
| 2026 |
+
tm.assert_index_equal(result, expected)
|
| 2027 |
+
|
| 2028 |
+
result = tdi.values + dti
|
| 2029 |
+
tm.assert_index_equal(result, expected)
|
| 2030 |
+
|
| 2031 |
+
def test_dti_iadd_tdi(self, tz_naive_fixture):
|
| 2032 |
+
# GH#17558
|
| 2033 |
+
tz = tz_naive_fixture
|
| 2034 |
+
dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
|
| 2035 |
+
tdi = pd.timedelta_range("0 days", periods=10)
|
| 2036 |
+
expected = date_range("2017-01-01", periods=10, tz=tz)
|
| 2037 |
+
expected = expected._with_freq(None)
|
| 2038 |
+
|
| 2039 |
+
# iadd with TimedeltaIndex
|
| 2040 |
+
result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
|
| 2041 |
+
result += tdi
|
| 2042 |
+
tm.assert_index_equal(result, expected)
|
| 2043 |
+
|
| 2044 |
+
result = pd.timedelta_range("0 days", periods=10)
|
| 2045 |
+
result += dti
|
| 2046 |
+
tm.assert_index_equal(result, expected)
|
| 2047 |
+
|
| 2048 |
+
# iadd with timedelta64 array
|
| 2049 |
+
result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
|
| 2050 |
+
result += tdi.values
|
| 2051 |
+
tm.assert_index_equal(result, expected)
|
| 2052 |
+
|
| 2053 |
+
result = pd.timedelta_range("0 days", periods=10)
|
| 2054 |
+
result += dti
|
| 2055 |
+
tm.assert_index_equal(result, expected)
|
| 2056 |
+
|
| 2057 |
+
def test_dti_sub_tdi(self, tz_naive_fixture):
|
| 2058 |
+
# GH#17558
|
| 2059 |
+
tz = tz_naive_fixture
|
| 2060 |
+
dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
|
| 2061 |
+
tdi = pd.timedelta_range("0 days", periods=10)
|
| 2062 |
+
expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D")
|
| 2063 |
+
expected = expected._with_freq(None)
|
| 2064 |
+
|
| 2065 |
+
# sub with TimedeltaIndex
|
| 2066 |
+
result = dti - tdi
|
| 2067 |
+
tm.assert_index_equal(result, expected)
|
| 2068 |
+
|
| 2069 |
+
msg = "cannot subtract .*TimedeltaArray"
|
| 2070 |
+
with pytest.raises(TypeError, match=msg):
|
| 2071 |
+
tdi - dti
|
| 2072 |
+
|
| 2073 |
+
# sub with timedelta64 array
|
| 2074 |
+
result = dti - tdi.values
|
| 2075 |
+
tm.assert_index_equal(result, expected)
|
| 2076 |
+
|
| 2077 |
+
msg = "cannot subtract a datelike from a TimedeltaArray"
|
| 2078 |
+
with pytest.raises(TypeError, match=msg):
|
| 2079 |
+
tdi.values - dti
|
| 2080 |
+
|
| 2081 |
+
def test_dti_isub_tdi(self, tz_naive_fixture, unit):
|
| 2082 |
+
# GH#17558
|
| 2083 |
+
tz = tz_naive_fixture
|
| 2084 |
+
dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10).as_unit(unit)
|
| 2085 |
+
tdi = pd.timedelta_range("0 days", periods=10, unit=unit)
|
| 2086 |
+
expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D", unit=unit)
|
| 2087 |
+
expected = expected._with_freq(None)
|
| 2088 |
+
|
| 2089 |
+
# isub with TimedeltaIndex
|
| 2090 |
+
result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10).as_unit(unit)
|
| 2091 |
+
result -= tdi
|
| 2092 |
+
tm.assert_index_equal(result, expected)
|
| 2093 |
+
|
| 2094 |
+
# DTA.__isub__ GH#43904
|
| 2095 |
+
dta = dti._data.copy()
|
| 2096 |
+
dta -= tdi
|
| 2097 |
+
tm.assert_datetime_array_equal(dta, expected._data)
|
| 2098 |
+
|
| 2099 |
+
out = dti._data.copy()
|
| 2100 |
+
np.subtract(out, tdi, out=out)
|
| 2101 |
+
tm.assert_datetime_array_equal(out, expected._data)
|
| 2102 |
+
|
| 2103 |
+
msg = "cannot subtract a datelike from a TimedeltaArray"
|
| 2104 |
+
with pytest.raises(TypeError, match=msg):
|
| 2105 |
+
tdi -= dti
|
| 2106 |
+
|
| 2107 |
+
# isub with timedelta64 array
|
| 2108 |
+
result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10).as_unit(unit)
|
| 2109 |
+
result -= tdi.values
|
| 2110 |
+
tm.assert_index_equal(result, expected)
|
| 2111 |
+
|
| 2112 |
+
with pytest.raises(TypeError, match=msg):
|
| 2113 |
+
tdi.values -= dti
|
| 2114 |
+
|
| 2115 |
+
with pytest.raises(TypeError, match=msg):
|
| 2116 |
+
tdi._values -= dti
|
| 2117 |
+
|
| 2118 |
+
# -------------------------------------------------------------
|
| 2119 |
+
# Binary Operations DatetimeIndex and datetime-like
|
| 2120 |
+
# TODO: A couple other tests belong in this section. Move them in
|
| 2121 |
+
# A PR where there isn't already a giant diff.
|
| 2122 |
+
|
| 2123 |
+
# -------------------------------------------------------------
|
| 2124 |
+
|
| 2125 |
+
def test_dta_add_sub_index(self, tz_naive_fixture):
|
| 2126 |
+
# Check that DatetimeArray defers to Index classes
|
| 2127 |
+
dti = date_range("20130101", periods=3, tz=tz_naive_fixture)
|
| 2128 |
+
dta = dti.array
|
| 2129 |
+
result = dta - dti
|
| 2130 |
+
expected = dti - dti
|
| 2131 |
+
tm.assert_index_equal(result, expected)
|
| 2132 |
+
|
| 2133 |
+
tdi = result
|
| 2134 |
+
result = dta + tdi
|
| 2135 |
+
expected = dti + tdi
|
| 2136 |
+
tm.assert_index_equal(result, expected)
|
| 2137 |
+
|
| 2138 |
+
result = dta - tdi
|
| 2139 |
+
expected = dti - tdi
|
| 2140 |
+
tm.assert_index_equal(result, expected)
|
| 2141 |
+
|
| 2142 |
+
def test_sub_dti_dti(self, unit):
|
| 2143 |
+
# previously performed setop (deprecated in 0.16.0), now changed to
|
| 2144 |
+
# return subtraction -> TimeDeltaIndex (GH ...)
|
| 2145 |
+
|
| 2146 |
+
dti = date_range("20130101", periods=3, unit=unit)
|
| 2147 |
+
dti_tz = date_range("20130101", periods=3, unit=unit).tz_localize("US/Eastern")
|
| 2148 |
+
expected = TimedeltaIndex([0, 0, 0]).as_unit(unit)
|
| 2149 |
+
|
| 2150 |
+
result = dti - dti
|
| 2151 |
+
tm.assert_index_equal(result, expected)
|
| 2152 |
+
|
| 2153 |
+
result = dti_tz - dti_tz
|
| 2154 |
+
tm.assert_index_equal(result, expected)
|
| 2155 |
+
msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
|
| 2156 |
+
with pytest.raises(TypeError, match=msg):
|
| 2157 |
+
dti_tz - dti
|
| 2158 |
+
|
| 2159 |
+
with pytest.raises(TypeError, match=msg):
|
| 2160 |
+
dti - dti_tz
|
| 2161 |
+
|
| 2162 |
+
# isub
|
| 2163 |
+
dti -= dti
|
| 2164 |
+
tm.assert_index_equal(dti, expected)
|
| 2165 |
+
|
| 2166 |
+
# different length raises ValueError
|
| 2167 |
+
dti1 = date_range("20130101", periods=3, unit=unit)
|
| 2168 |
+
dti2 = date_range("20130101", periods=4, unit=unit)
|
| 2169 |
+
msg = "cannot add indices of unequal length"
|
| 2170 |
+
with pytest.raises(ValueError, match=msg):
|
| 2171 |
+
dti1 - dti2
|
| 2172 |
+
|
| 2173 |
+
# NaN propagation
|
| 2174 |
+
dti1 = DatetimeIndex(["2012-01-01", np.nan, "2012-01-03"]).as_unit(unit)
|
| 2175 |
+
dti2 = DatetimeIndex(["2012-01-02", "2012-01-03", np.nan]).as_unit(unit)
|
| 2176 |
+
expected = TimedeltaIndex(["1 days", np.nan, np.nan]).as_unit(unit)
|
| 2177 |
+
result = dti2 - dti1
|
| 2178 |
+
tm.assert_index_equal(result, expected)
|
| 2179 |
+
|
| 2180 |
+
# -------------------------------------------------------------------
|
| 2181 |
+
# TODO: Most of this block is moved from series or frame tests, needs
|
| 2182 |
+
# cleanup, box-parametrization, and de-duplication
|
| 2183 |
+
|
| 2184 |
+
@pytest.mark.parametrize("op", [operator.add, operator.sub])
|
| 2185 |
+
def test_timedelta64_equal_timedelta_supported_ops(self, op, box_with_array):
|
| 2186 |
+
ser = Series(
|
| 2187 |
+
[
|
| 2188 |
+
Timestamp("20130301"),
|
| 2189 |
+
Timestamp("20130228 23:00:00"),
|
| 2190 |
+
Timestamp("20130228 22:00:00"),
|
| 2191 |
+
Timestamp("20130228 21:00:00"),
|
| 2192 |
+
]
|
| 2193 |
+
)
|
| 2194 |
+
obj = box_with_array(ser)
|
| 2195 |
+
|
| 2196 |
+
intervals = ["D", "h", "m", "s", "us"]
|
| 2197 |
+
|
| 2198 |
+
def timedelta64(*args):
|
| 2199 |
+
# see casting notes in NumPy gh-12927
|
| 2200 |
+
return np.sum(list(starmap(np.timedelta64, zip(args, intervals))))
|
| 2201 |
+
|
| 2202 |
+
for d, h, m, s, us in product(*([range(2)] * 5)):
|
| 2203 |
+
nptd = timedelta64(d, h, m, s, us)
|
| 2204 |
+
pytd = timedelta(days=d, hours=h, minutes=m, seconds=s, microseconds=us)
|
| 2205 |
+
lhs = op(obj, nptd)
|
| 2206 |
+
rhs = op(obj, pytd)
|
| 2207 |
+
|
| 2208 |
+
tm.assert_equal(lhs, rhs)
|
| 2209 |
+
|
| 2210 |
+
def test_ops_nat_mixed_datetime64_timedelta64(self):
|
| 2211 |
+
# GH#11349
|
| 2212 |
+
timedelta_series = Series([NaT, Timedelta("1s")])
|
| 2213 |
+
datetime_series = Series([NaT, Timestamp("19900315")])
|
| 2214 |
+
nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[ns]")
|
| 2215 |
+
nat_series_dtype_timestamp = Series([NaT, NaT], dtype="datetime64[ns]")
|
| 2216 |
+
single_nat_dtype_datetime = Series([NaT], dtype="datetime64[ns]")
|
| 2217 |
+
single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[ns]")
|
| 2218 |
+
|
| 2219 |
+
# subtraction
|
| 2220 |
+
tm.assert_series_equal(
|
| 2221 |
+
datetime_series - single_nat_dtype_datetime, nat_series_dtype_timedelta
|
| 2222 |
+
)
|
| 2223 |
+
|
| 2224 |
+
tm.assert_series_equal(
|
| 2225 |
+
datetime_series - single_nat_dtype_timedelta, nat_series_dtype_timestamp
|
| 2226 |
+
)
|
| 2227 |
+
tm.assert_series_equal(
|
| 2228 |
+
-single_nat_dtype_timedelta + datetime_series, nat_series_dtype_timestamp
|
| 2229 |
+
)
|
| 2230 |
+
|
| 2231 |
+
# without a Series wrapping the NaT, it is ambiguous
|
| 2232 |
+
# whether it is a datetime64 or timedelta64
|
| 2233 |
+
# defaults to interpreting it as timedelta64
|
| 2234 |
+
tm.assert_series_equal(
|
| 2235 |
+
nat_series_dtype_timestamp - single_nat_dtype_datetime,
|
| 2236 |
+
nat_series_dtype_timedelta,
|
| 2237 |
+
)
|
| 2238 |
+
|
| 2239 |
+
tm.assert_series_equal(
|
| 2240 |
+
nat_series_dtype_timestamp - single_nat_dtype_timedelta,
|
| 2241 |
+
nat_series_dtype_timestamp,
|
| 2242 |
+
)
|
| 2243 |
+
tm.assert_series_equal(
|
| 2244 |
+
-single_nat_dtype_timedelta + nat_series_dtype_timestamp,
|
| 2245 |
+
nat_series_dtype_timestamp,
|
| 2246 |
+
)
|
| 2247 |
+
msg = "cannot subtract a datelike"
|
| 2248 |
+
with pytest.raises(TypeError, match=msg):
|
| 2249 |
+
timedelta_series - single_nat_dtype_datetime
|
| 2250 |
+
|
| 2251 |
+
# addition
|
| 2252 |
+
tm.assert_series_equal(
|
| 2253 |
+
nat_series_dtype_timestamp + single_nat_dtype_timedelta,
|
| 2254 |
+
nat_series_dtype_timestamp,
|
| 2255 |
+
)
|
| 2256 |
+
tm.assert_series_equal(
|
| 2257 |
+
single_nat_dtype_timedelta + nat_series_dtype_timestamp,
|
| 2258 |
+
nat_series_dtype_timestamp,
|
| 2259 |
+
)
|
| 2260 |
+
|
| 2261 |
+
tm.assert_series_equal(
|
| 2262 |
+
nat_series_dtype_timestamp + single_nat_dtype_timedelta,
|
| 2263 |
+
nat_series_dtype_timestamp,
|
| 2264 |
+
)
|
| 2265 |
+
tm.assert_series_equal(
|
| 2266 |
+
single_nat_dtype_timedelta + nat_series_dtype_timestamp,
|
| 2267 |
+
nat_series_dtype_timestamp,
|
| 2268 |
+
)
|
| 2269 |
+
|
| 2270 |
+
tm.assert_series_equal(
|
| 2271 |
+
nat_series_dtype_timedelta + single_nat_dtype_datetime,
|
| 2272 |
+
nat_series_dtype_timestamp,
|
| 2273 |
+
)
|
| 2274 |
+
tm.assert_series_equal(
|
| 2275 |
+
single_nat_dtype_datetime + nat_series_dtype_timedelta,
|
| 2276 |
+
nat_series_dtype_timestamp,
|
| 2277 |
+
)
|
| 2278 |
+
|
| 2279 |
+
def test_ufunc_coercions(self, unit):
|
| 2280 |
+
idx = date_range("2011-01-01", periods=3, freq="2D", name="x", unit=unit)
|
| 2281 |
+
|
| 2282 |
+
delta = np.timedelta64(1, "D")
|
| 2283 |
+
exp = date_range("2011-01-02", periods=3, freq="2D", name="x", unit=unit)
|
| 2284 |
+
for result in [idx + delta, np.add(idx, delta)]:
|
| 2285 |
+
assert isinstance(result, DatetimeIndex)
|
| 2286 |
+
tm.assert_index_equal(result, exp)
|
| 2287 |
+
assert result.freq == "2D"
|
| 2288 |
+
|
| 2289 |
+
exp = date_range("2010-12-31", periods=3, freq="2D", name="x", unit=unit)
|
| 2290 |
+
|
| 2291 |
+
for result in [idx - delta, np.subtract(idx, delta)]:
|
| 2292 |
+
assert isinstance(result, DatetimeIndex)
|
| 2293 |
+
tm.assert_index_equal(result, exp)
|
| 2294 |
+
assert result.freq == "2D"
|
| 2295 |
+
|
| 2296 |
+
# When adding/subtracting an ndarray (which has no .freq), the result
|
| 2297 |
+
# does not infer freq
|
| 2298 |
+
idx = idx._with_freq(None)
|
| 2299 |
+
delta = np.array(
|
| 2300 |
+
[np.timedelta64(1, "D"), np.timedelta64(2, "D"), np.timedelta64(3, "D")]
|
| 2301 |
+
)
|
| 2302 |
+
exp = DatetimeIndex(
|
| 2303 |
+
["2011-01-02", "2011-01-05", "2011-01-08"], name="x"
|
| 2304 |
+
).as_unit(unit)
|
| 2305 |
+
|
| 2306 |
+
for result in [idx + delta, np.add(idx, delta)]:
|
| 2307 |
+
tm.assert_index_equal(result, exp)
|
| 2308 |
+
assert result.freq == exp.freq
|
| 2309 |
+
|
| 2310 |
+
exp = DatetimeIndex(
|
| 2311 |
+
["2010-12-31", "2011-01-01", "2011-01-02"], name="x"
|
| 2312 |
+
).as_unit(unit)
|
| 2313 |
+
for result in [idx - delta, np.subtract(idx, delta)]:
|
| 2314 |
+
assert isinstance(result, DatetimeIndex)
|
| 2315 |
+
tm.assert_index_equal(result, exp)
|
| 2316 |
+
assert result.freq == exp.freq
|
| 2317 |
+
|
| 2318 |
+
def test_dti_add_series(self, tz_naive_fixture, names):
|
| 2319 |
+
# GH#13905
|
| 2320 |
+
tz = tz_naive_fixture
|
| 2321 |
+
index = DatetimeIndex(
|
| 2322 |
+
["2016-06-28 05:30", "2016-06-28 05:31"], tz=tz, name=names[0]
|
| 2323 |
+
).as_unit("ns")
|
| 2324 |
+
ser = Series([Timedelta(seconds=5)] * 2, index=index, name=names[1])
|
| 2325 |
+
expected = Series(index + Timedelta(seconds=5), index=index, name=names[2])
|
| 2326 |
+
|
| 2327 |
+
# passing name arg isn't enough when names[2] is None
|
| 2328 |
+
expected.name = names[2]
|
| 2329 |
+
assert expected.dtype == index.dtype
|
| 2330 |
+
result = ser + index
|
| 2331 |
+
tm.assert_series_equal(result, expected)
|
| 2332 |
+
result2 = index + ser
|
| 2333 |
+
tm.assert_series_equal(result2, expected)
|
| 2334 |
+
|
| 2335 |
+
expected = index + Timedelta(seconds=5)
|
| 2336 |
+
result3 = ser.values + index
|
| 2337 |
+
tm.assert_index_equal(result3, expected)
|
| 2338 |
+
result4 = index + ser.values
|
| 2339 |
+
tm.assert_index_equal(result4, expected)
|
| 2340 |
+
|
| 2341 |
+
@pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
|
| 2342 |
+
def test_dti_addsub_offset_arraylike(
|
| 2343 |
+
self, tz_naive_fixture, names, op, index_or_series
|
| 2344 |
+
):
|
| 2345 |
+
# GH#18849, GH#19744
|
| 2346 |
+
other_box = index_or_series
|
| 2347 |
+
|
| 2348 |
+
tz = tz_naive_fixture
|
| 2349 |
+
dti = date_range("2017-01-01", periods=2, tz=tz, name=names[0])
|
| 2350 |
+
other = other_box([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])
|
| 2351 |
+
|
| 2352 |
+
xbox = get_upcast_box(dti, other)
|
| 2353 |
+
|
| 2354 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 2355 |
+
res = op(dti, other)
|
| 2356 |
+
|
| 2357 |
+
expected = DatetimeIndex(
|
| 2358 |
+
[op(dti[n], other[n]) for n in range(len(dti))], name=names[2], freq="infer"
|
| 2359 |
+
)
|
| 2360 |
+
expected = tm.box_expected(expected, xbox).astype(object)
|
| 2361 |
+
tm.assert_equal(res, expected)
|
| 2362 |
+
|
| 2363 |
+
@pytest.mark.parametrize("other_box", [pd.Index, np.array])
|
| 2364 |
+
def test_dti_addsub_object_arraylike(
|
| 2365 |
+
self, tz_naive_fixture, box_with_array, other_box
|
| 2366 |
+
):
|
| 2367 |
+
tz = tz_naive_fixture
|
| 2368 |
+
|
| 2369 |
+
dti = date_range("2017-01-01", periods=2, tz=tz)
|
| 2370 |
+
dtarr = tm.box_expected(dti, box_with_array)
|
| 2371 |
+
other = other_box([pd.offsets.MonthEnd(), Timedelta(days=4)])
|
| 2372 |
+
xbox = get_upcast_box(dtarr, other)
|
| 2373 |
+
|
| 2374 |
+
expected = DatetimeIndex(["2017-01-31", "2017-01-06"], tz=tz_naive_fixture)
|
| 2375 |
+
expected = tm.box_expected(expected, xbox).astype(object)
|
| 2376 |
+
|
| 2377 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 2378 |
+
result = dtarr + other
|
| 2379 |
+
tm.assert_equal(result, expected)
|
| 2380 |
+
|
| 2381 |
+
expected = DatetimeIndex(["2016-12-31", "2016-12-29"], tz=tz_naive_fixture)
|
| 2382 |
+
expected = tm.box_expected(expected, xbox).astype(object)
|
| 2383 |
+
|
| 2384 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 2385 |
+
result = dtarr - other
|
| 2386 |
+
tm.assert_equal(result, expected)
|
| 2387 |
+
|
| 2388 |
+
|
| 2389 |
+
@pytest.mark.parametrize("years", [-1, 0, 1])
|
| 2390 |
+
@pytest.mark.parametrize("months", [-2, 0, 2])
|
| 2391 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
| 2392 |
+
def test_shift_months(years, months, unit):
|
| 2393 |
+
dti = DatetimeIndex(
|
| 2394 |
+
[
|
| 2395 |
+
Timestamp("2000-01-05 00:15:00"),
|
| 2396 |
+
Timestamp("2000-01-31 00:23:00"),
|
| 2397 |
+
Timestamp("2000-01-01"),
|
| 2398 |
+
Timestamp("2000-02-29"),
|
| 2399 |
+
Timestamp("2000-12-31"),
|
| 2400 |
+
]
|
| 2401 |
+
).as_unit(unit)
|
| 2402 |
+
shifted = shift_months(dti.asi8, years * 12 + months, reso=dti._data._creso)
|
| 2403 |
+
shifted_dt64 = shifted.view(f"M8[{dti.unit}]")
|
| 2404 |
+
actual = DatetimeIndex(shifted_dt64)
|
| 2405 |
+
|
| 2406 |
+
raw = [x + pd.offsets.DateOffset(years=years, months=months) for x in dti]
|
| 2407 |
+
expected = DatetimeIndex(raw).as_unit(dti.unit)
|
| 2408 |
+
tm.assert_index_equal(actual, expected)
|
| 2409 |
+
|
| 2410 |
+
|
| 2411 |
+
def test_dt64arr_addsub_object_dtype_2d():
|
| 2412 |
+
# block-wise DataFrame operations will require operating on 2D
|
| 2413 |
+
# DatetimeArray/TimedeltaArray, so check that specifically.
|
| 2414 |
+
dti = date_range("1994-02-13", freq="2W", periods=4)
|
| 2415 |
+
dta = dti._data.reshape((4, 1))
|
| 2416 |
+
|
| 2417 |
+
other = np.array([[pd.offsets.Day(n)] for n in range(4)])
|
| 2418 |
+
assert other.shape == dta.shape
|
| 2419 |
+
|
| 2420 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 2421 |
+
result = dta + other
|
| 2422 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 2423 |
+
expected = (dta[:, 0] + other[:, 0]).reshape(-1, 1)
|
| 2424 |
+
|
| 2425 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 2426 |
+
|
| 2427 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 2428 |
+
# Case where we expect to get a TimedeltaArray back
|
| 2429 |
+
result2 = dta - dta.astype(object)
|
| 2430 |
+
|
| 2431 |
+
assert result2.shape == (4, 1)
|
| 2432 |
+
assert all(td._value == 0 for td in result2.ravel())
|
| 2433 |
+
|
| 2434 |
+
|
| 2435 |
+
def test_non_nano_dt64_addsub_np_nat_scalars():
|
| 2436 |
+
# GH 52295
|
| 2437 |
+
ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
|
| 2438 |
+
result = ser - np.datetime64("nat", "ms")
|
| 2439 |
+
expected = Series([NaT] * 3, dtype="timedelta64[ms]")
|
| 2440 |
+
tm.assert_series_equal(result, expected)
|
| 2441 |
+
|
| 2442 |
+
result = ser + np.timedelta64("nat", "ms")
|
| 2443 |
+
expected = Series([NaT] * 3, dtype="datetime64[ms]")
|
| 2444 |
+
tm.assert_series_equal(result, expected)
|
| 2445 |
+
|
| 2446 |
+
|
| 2447 |
+
def test_non_nano_dt64_addsub_np_nat_scalars_unitless():
|
| 2448 |
+
# GH 52295
|
| 2449 |
+
# TODO: Can we default to the ser unit?
|
| 2450 |
+
ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
|
| 2451 |
+
result = ser - np.datetime64("nat")
|
| 2452 |
+
expected = Series([NaT] * 3, dtype="timedelta64[ns]")
|
| 2453 |
+
tm.assert_series_equal(result, expected)
|
| 2454 |
+
|
| 2455 |
+
result = ser + np.timedelta64("nat")
|
| 2456 |
+
expected = Series([NaT] * 3, dtype="datetime64[ns]")
|
| 2457 |
+
tm.assert_series_equal(result, expected)
|
| 2458 |
+
|
| 2459 |
+
|
| 2460 |
+
def test_non_nano_dt64_addsub_np_nat_scalars_unsupported_unit():
|
| 2461 |
+
# GH 52295
|
| 2462 |
+
ser = Series([12332, 23243, 33243], dtype="datetime64[s]")
|
| 2463 |
+
result = ser - np.datetime64("nat", "D")
|
| 2464 |
+
expected = Series([NaT] * 3, dtype="timedelta64[s]")
|
| 2465 |
+
tm.assert_series_equal(result, expected)
|
| 2466 |
+
|
| 2467 |
+
result = ser + np.timedelta64("nat", "D")
|
| 2468 |
+
expected = Series([NaT] * 3, dtype="datetime64[s]")
|
| 2469 |
+
tm.assert_series_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_interval.py
ADDED
|
@@ -0,0 +1,306 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import operator
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
from pandas.core.dtypes.common import is_list_like
|
| 7 |
+
|
| 8 |
+
import pandas as pd
|
| 9 |
+
from pandas import (
|
| 10 |
+
Categorical,
|
| 11 |
+
Index,
|
| 12 |
+
Interval,
|
| 13 |
+
IntervalIndex,
|
| 14 |
+
Period,
|
| 15 |
+
Series,
|
| 16 |
+
Timedelta,
|
| 17 |
+
Timestamp,
|
| 18 |
+
date_range,
|
| 19 |
+
period_range,
|
| 20 |
+
timedelta_range,
|
| 21 |
+
)
|
| 22 |
+
import pandas._testing as tm
|
| 23 |
+
from pandas.core.arrays import (
|
| 24 |
+
BooleanArray,
|
| 25 |
+
IntervalArray,
|
| 26 |
+
)
|
| 27 |
+
from pandas.tests.arithmetic.common import get_upcast_box
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@pytest.fixture(
|
| 31 |
+
params=[
|
| 32 |
+
(Index([0, 2, 4, 4]), Index([1, 3, 5, 8])),
|
| 33 |
+
(Index([0.0, 1.0, 2.0, np.nan]), Index([1.0, 2.0, 3.0, np.nan])),
|
| 34 |
+
(
|
| 35 |
+
timedelta_range("0 days", periods=3).insert(3, pd.NaT),
|
| 36 |
+
timedelta_range("1 day", periods=3).insert(3, pd.NaT),
|
| 37 |
+
),
|
| 38 |
+
(
|
| 39 |
+
date_range("20170101", periods=3).insert(3, pd.NaT),
|
| 40 |
+
date_range("20170102", periods=3).insert(3, pd.NaT),
|
| 41 |
+
),
|
| 42 |
+
(
|
| 43 |
+
date_range("20170101", periods=3, tz="US/Eastern").insert(3, pd.NaT),
|
| 44 |
+
date_range("20170102", periods=3, tz="US/Eastern").insert(3, pd.NaT),
|
| 45 |
+
),
|
| 46 |
+
],
|
| 47 |
+
ids=lambda x: str(x[0].dtype),
|
| 48 |
+
)
|
| 49 |
+
def left_right_dtypes(request):
|
| 50 |
+
"""
|
| 51 |
+
Fixture for building an IntervalArray from various dtypes
|
| 52 |
+
"""
|
| 53 |
+
return request.param
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@pytest.fixture
|
| 57 |
+
def interval_array(left_right_dtypes):
|
| 58 |
+
"""
|
| 59 |
+
Fixture to generate an IntervalArray of various dtypes containing NA if possible
|
| 60 |
+
"""
|
| 61 |
+
left, right = left_right_dtypes
|
| 62 |
+
return IntervalArray.from_arrays(left, right)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def create_categorical_intervals(left, right, closed="right"):
|
| 66 |
+
return Categorical(IntervalIndex.from_arrays(left, right, closed))
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def create_series_intervals(left, right, closed="right"):
|
| 70 |
+
return Series(IntervalArray.from_arrays(left, right, closed))
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def create_series_categorical_intervals(left, right, closed="right"):
|
| 74 |
+
return Series(Categorical(IntervalIndex.from_arrays(left, right, closed)))
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class TestComparison:
|
| 78 |
+
@pytest.fixture(params=[operator.eq, operator.ne])
|
| 79 |
+
def op(self, request):
|
| 80 |
+
return request.param
|
| 81 |
+
|
| 82 |
+
@pytest.fixture(
|
| 83 |
+
params=[
|
| 84 |
+
IntervalArray.from_arrays,
|
| 85 |
+
IntervalIndex.from_arrays,
|
| 86 |
+
create_categorical_intervals,
|
| 87 |
+
create_series_intervals,
|
| 88 |
+
create_series_categorical_intervals,
|
| 89 |
+
],
|
| 90 |
+
ids=[
|
| 91 |
+
"IntervalArray",
|
| 92 |
+
"IntervalIndex",
|
| 93 |
+
"Categorical[Interval]",
|
| 94 |
+
"Series[Interval]",
|
| 95 |
+
"Series[Categorical[Interval]]",
|
| 96 |
+
],
|
| 97 |
+
)
|
| 98 |
+
def interval_constructor(self, request):
|
| 99 |
+
"""
|
| 100 |
+
Fixture for all pandas native interval constructors.
|
| 101 |
+
To be used as the LHS of IntervalArray comparisons.
|
| 102 |
+
"""
|
| 103 |
+
return request.param
|
| 104 |
+
|
| 105 |
+
def elementwise_comparison(self, op, interval_array, other):
|
| 106 |
+
"""
|
| 107 |
+
Helper that performs elementwise comparisons between `array` and `other`
|
| 108 |
+
"""
|
| 109 |
+
other = other if is_list_like(other) else [other] * len(interval_array)
|
| 110 |
+
expected = np.array([op(x, y) for x, y in zip(interval_array, other)])
|
| 111 |
+
if isinstance(other, Series):
|
| 112 |
+
return Series(expected, index=other.index)
|
| 113 |
+
return expected
|
| 114 |
+
|
| 115 |
+
def test_compare_scalar_interval(self, op, interval_array):
|
| 116 |
+
# matches first interval
|
| 117 |
+
other = interval_array[0]
|
| 118 |
+
result = op(interval_array, other)
|
| 119 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 120 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 121 |
+
|
| 122 |
+
# matches on a single endpoint but not both
|
| 123 |
+
other = Interval(interval_array.left[0], interval_array.right[1])
|
| 124 |
+
result = op(interval_array, other)
|
| 125 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 126 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 127 |
+
|
| 128 |
+
def test_compare_scalar_interval_mixed_closed(self, op, closed, other_closed):
|
| 129 |
+
interval_array = IntervalArray.from_arrays(range(2), range(1, 3), closed=closed)
|
| 130 |
+
other = Interval(0, 1, closed=other_closed)
|
| 131 |
+
|
| 132 |
+
result = op(interval_array, other)
|
| 133 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 134 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 135 |
+
|
| 136 |
+
def test_compare_scalar_na(self, op, interval_array, nulls_fixture, box_with_array):
|
| 137 |
+
box = box_with_array
|
| 138 |
+
obj = tm.box_expected(interval_array, box)
|
| 139 |
+
result = op(obj, nulls_fixture)
|
| 140 |
+
|
| 141 |
+
if nulls_fixture is pd.NA:
|
| 142 |
+
# GH#31882
|
| 143 |
+
exp = np.ones(interval_array.shape, dtype=bool)
|
| 144 |
+
expected = BooleanArray(exp, exp)
|
| 145 |
+
else:
|
| 146 |
+
expected = self.elementwise_comparison(op, interval_array, nulls_fixture)
|
| 147 |
+
|
| 148 |
+
if not (box is Index and nulls_fixture is pd.NA):
|
| 149 |
+
# don't cast expected from BooleanArray to ndarray[object]
|
| 150 |
+
xbox = get_upcast_box(obj, nulls_fixture, True)
|
| 151 |
+
expected = tm.box_expected(expected, xbox)
|
| 152 |
+
|
| 153 |
+
tm.assert_equal(result, expected)
|
| 154 |
+
|
| 155 |
+
rev = op(nulls_fixture, obj)
|
| 156 |
+
tm.assert_equal(rev, expected)
|
| 157 |
+
|
| 158 |
+
@pytest.mark.parametrize(
|
| 159 |
+
"other",
|
| 160 |
+
[
|
| 161 |
+
0,
|
| 162 |
+
1.0,
|
| 163 |
+
True,
|
| 164 |
+
"foo",
|
| 165 |
+
Timestamp("2017-01-01"),
|
| 166 |
+
Timestamp("2017-01-01", tz="US/Eastern"),
|
| 167 |
+
Timedelta("0 days"),
|
| 168 |
+
Period("2017-01-01", "D"),
|
| 169 |
+
],
|
| 170 |
+
)
|
| 171 |
+
def test_compare_scalar_other(self, op, interval_array, other):
|
| 172 |
+
result = op(interval_array, other)
|
| 173 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 174 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 175 |
+
|
| 176 |
+
def test_compare_list_like_interval(self, op, interval_array, interval_constructor):
|
| 177 |
+
# same endpoints
|
| 178 |
+
other = interval_constructor(interval_array.left, interval_array.right)
|
| 179 |
+
result = op(interval_array, other)
|
| 180 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 181 |
+
tm.assert_equal(result, expected)
|
| 182 |
+
|
| 183 |
+
# different endpoints
|
| 184 |
+
other = interval_constructor(
|
| 185 |
+
interval_array.left[::-1], interval_array.right[::-1]
|
| 186 |
+
)
|
| 187 |
+
result = op(interval_array, other)
|
| 188 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 189 |
+
tm.assert_equal(result, expected)
|
| 190 |
+
|
| 191 |
+
# all nan endpoints
|
| 192 |
+
other = interval_constructor([np.nan] * 4, [np.nan] * 4)
|
| 193 |
+
result = op(interval_array, other)
|
| 194 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 195 |
+
tm.assert_equal(result, expected)
|
| 196 |
+
|
| 197 |
+
def test_compare_list_like_interval_mixed_closed(
|
| 198 |
+
self, op, interval_constructor, closed, other_closed
|
| 199 |
+
):
|
| 200 |
+
interval_array = IntervalArray.from_arrays(range(2), range(1, 3), closed=closed)
|
| 201 |
+
other = interval_constructor(range(2), range(1, 3), closed=other_closed)
|
| 202 |
+
|
| 203 |
+
result = op(interval_array, other)
|
| 204 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 205 |
+
tm.assert_equal(result, expected)
|
| 206 |
+
|
| 207 |
+
@pytest.mark.parametrize(
|
| 208 |
+
"other",
|
| 209 |
+
[
|
| 210 |
+
(
|
| 211 |
+
Interval(0, 1),
|
| 212 |
+
Interval(Timedelta("1 day"), Timedelta("2 days")),
|
| 213 |
+
Interval(4, 5, "both"),
|
| 214 |
+
Interval(10, 20, "neither"),
|
| 215 |
+
),
|
| 216 |
+
(0, 1.5, Timestamp("20170103"), np.nan),
|
| 217 |
+
(
|
| 218 |
+
Timestamp("20170102", tz="US/Eastern"),
|
| 219 |
+
Timedelta("2 days"),
|
| 220 |
+
"baz",
|
| 221 |
+
pd.NaT,
|
| 222 |
+
),
|
| 223 |
+
],
|
| 224 |
+
)
|
| 225 |
+
def test_compare_list_like_object(self, op, interval_array, other):
|
| 226 |
+
result = op(interval_array, other)
|
| 227 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 228 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 229 |
+
|
| 230 |
+
def test_compare_list_like_nan(self, op, interval_array, nulls_fixture):
|
| 231 |
+
other = [nulls_fixture] * 4
|
| 232 |
+
result = op(interval_array, other)
|
| 233 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 234 |
+
|
| 235 |
+
tm.assert_equal(result, expected)
|
| 236 |
+
|
| 237 |
+
@pytest.mark.parametrize(
|
| 238 |
+
"other",
|
| 239 |
+
[
|
| 240 |
+
np.arange(4, dtype="int64"),
|
| 241 |
+
np.arange(4, dtype="float64"),
|
| 242 |
+
date_range("2017-01-01", periods=4),
|
| 243 |
+
date_range("2017-01-01", periods=4, tz="US/Eastern"),
|
| 244 |
+
timedelta_range("0 days", periods=4),
|
| 245 |
+
period_range("2017-01-01", periods=4, freq="D"),
|
| 246 |
+
Categorical(list("abab")),
|
| 247 |
+
Categorical(date_range("2017-01-01", periods=4)),
|
| 248 |
+
pd.array(list("abcd")),
|
| 249 |
+
pd.array(["foo", 3.14, None, object()], dtype=object),
|
| 250 |
+
],
|
| 251 |
+
ids=lambda x: str(x.dtype),
|
| 252 |
+
)
|
| 253 |
+
def test_compare_list_like_other(self, op, interval_array, other):
|
| 254 |
+
result = op(interval_array, other)
|
| 255 |
+
expected = self.elementwise_comparison(op, interval_array, other)
|
| 256 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 257 |
+
|
| 258 |
+
@pytest.mark.parametrize("length", [1, 3, 5])
|
| 259 |
+
@pytest.mark.parametrize("other_constructor", [IntervalArray, list])
|
| 260 |
+
def test_compare_length_mismatch_errors(self, op, other_constructor, length):
|
| 261 |
+
interval_array = IntervalArray.from_arrays(range(4), range(1, 5))
|
| 262 |
+
other = other_constructor([Interval(0, 1)] * length)
|
| 263 |
+
with pytest.raises(ValueError, match="Lengths must match to compare"):
|
| 264 |
+
op(interval_array, other)
|
| 265 |
+
|
| 266 |
+
@pytest.mark.parametrize(
|
| 267 |
+
"constructor, expected_type, assert_func",
|
| 268 |
+
[
|
| 269 |
+
(IntervalIndex, np.array, tm.assert_numpy_array_equal),
|
| 270 |
+
(Series, Series, tm.assert_series_equal),
|
| 271 |
+
],
|
| 272 |
+
)
|
| 273 |
+
def test_index_series_compat(self, op, constructor, expected_type, assert_func):
|
| 274 |
+
# IntervalIndex/Series that rely on IntervalArray for comparisons
|
| 275 |
+
breaks = range(4)
|
| 276 |
+
index = constructor(IntervalIndex.from_breaks(breaks))
|
| 277 |
+
|
| 278 |
+
# scalar comparisons
|
| 279 |
+
other = index[0]
|
| 280 |
+
result = op(index, other)
|
| 281 |
+
expected = expected_type(self.elementwise_comparison(op, index, other))
|
| 282 |
+
assert_func(result, expected)
|
| 283 |
+
|
| 284 |
+
other = breaks[0]
|
| 285 |
+
result = op(index, other)
|
| 286 |
+
expected = expected_type(self.elementwise_comparison(op, index, other))
|
| 287 |
+
assert_func(result, expected)
|
| 288 |
+
|
| 289 |
+
# list-like comparisons
|
| 290 |
+
other = IntervalArray.from_breaks(breaks)
|
| 291 |
+
result = op(index, other)
|
| 292 |
+
expected = expected_type(self.elementwise_comparison(op, index, other))
|
| 293 |
+
assert_func(result, expected)
|
| 294 |
+
|
| 295 |
+
other = [index[0], breaks[0], "foo"]
|
| 296 |
+
result = op(index, other)
|
| 297 |
+
expected = expected_type(self.elementwise_comparison(op, index, other))
|
| 298 |
+
assert_func(result, expected)
|
| 299 |
+
|
| 300 |
+
@pytest.mark.parametrize("scalars", ["a", False, 1, 1.0, None])
|
| 301 |
+
def test_comparison_operations(self, scalars):
|
| 302 |
+
# GH #28981
|
| 303 |
+
expected = Series([False, False])
|
| 304 |
+
s = Series([Interval(0, 1), Interval(1, 2)], dtype="interval")
|
| 305 |
+
result = s == scalars
|
| 306 |
+
tm.assert_series_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_numeric.py
ADDED
|
@@ -0,0 +1,1567 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Arithmetic tests for DataFrame/Series/Index/Array classes that should
|
| 2 |
+
# behave identically.
|
| 3 |
+
# Specifically for numeric dtypes
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
from collections import abc
|
| 7 |
+
from datetime import timedelta
|
| 8 |
+
from decimal import Decimal
|
| 9 |
+
import operator
|
| 10 |
+
|
| 11 |
+
import numpy as np
|
| 12 |
+
import pytest
|
| 13 |
+
|
| 14 |
+
import pandas as pd
|
| 15 |
+
from pandas import (
|
| 16 |
+
Index,
|
| 17 |
+
RangeIndex,
|
| 18 |
+
Series,
|
| 19 |
+
Timedelta,
|
| 20 |
+
TimedeltaIndex,
|
| 21 |
+
array,
|
| 22 |
+
date_range,
|
| 23 |
+
)
|
| 24 |
+
import pandas._testing as tm
|
| 25 |
+
from pandas.core import ops
|
| 26 |
+
from pandas.core.computation import expressions as expr
|
| 27 |
+
from pandas.tests.arithmetic.common import (
|
| 28 |
+
assert_invalid_addsub_type,
|
| 29 |
+
assert_invalid_comparison,
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.fixture(autouse=True, params=[0, 1000000], ids=["numexpr", "python"])
|
| 34 |
+
def switch_numexpr_min_elements(request, monkeypatch):
|
| 35 |
+
with monkeypatch.context() as m:
|
| 36 |
+
m.setattr(expr, "_MIN_ELEMENTS", request.param)
|
| 37 |
+
yield request.param
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@pytest.fixture(params=[Index, Series, tm.to_array])
|
| 41 |
+
def box_pandas_1d_array(request):
|
| 42 |
+
"""
|
| 43 |
+
Fixture to test behavior for Index, Series and tm.to_array classes
|
| 44 |
+
"""
|
| 45 |
+
return request.param
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@pytest.fixture(
|
| 49 |
+
params=[
|
| 50 |
+
# TODO: add more dtypes here
|
| 51 |
+
Index(np.arange(5, dtype="float64")),
|
| 52 |
+
Index(np.arange(5, dtype="int64")),
|
| 53 |
+
Index(np.arange(5, dtype="uint64")),
|
| 54 |
+
RangeIndex(5),
|
| 55 |
+
],
|
| 56 |
+
ids=lambda x: type(x).__name__,
|
| 57 |
+
)
|
| 58 |
+
def numeric_idx(request):
|
| 59 |
+
"""
|
| 60 |
+
Several types of numeric-dtypes Index objects
|
| 61 |
+
"""
|
| 62 |
+
return request.param
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
@pytest.fixture(
|
| 66 |
+
params=[Index, Series, tm.to_array, np.array, list], ids=lambda x: x.__name__
|
| 67 |
+
)
|
| 68 |
+
def box_1d_array(request):
|
| 69 |
+
"""
|
| 70 |
+
Fixture to test behavior for Index, Series, tm.to_array, numpy Array and list
|
| 71 |
+
classes
|
| 72 |
+
"""
|
| 73 |
+
return request.param
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def adjust_negative_zero(zero, expected):
|
| 77 |
+
"""
|
| 78 |
+
Helper to adjust the expected result if we are dividing by -0.0
|
| 79 |
+
as opposed to 0.0
|
| 80 |
+
"""
|
| 81 |
+
if np.signbit(np.array(zero)).any():
|
| 82 |
+
# All entries in the `zero` fixture should be either
|
| 83 |
+
# all-negative or no-negative.
|
| 84 |
+
assert np.signbit(np.array(zero)).all()
|
| 85 |
+
|
| 86 |
+
expected *= -1
|
| 87 |
+
|
| 88 |
+
return expected
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def compare_op(series, other, op):
|
| 92 |
+
left = np.abs(series) if op in (ops.rpow, operator.pow) else series
|
| 93 |
+
right = np.abs(other) if op in (ops.rpow, operator.pow) else other
|
| 94 |
+
|
| 95 |
+
cython_or_numpy = op(left, right)
|
| 96 |
+
python = left.combine(right, op)
|
| 97 |
+
if isinstance(other, Series) and not other.index.equals(series.index):
|
| 98 |
+
python.index = python.index._with_freq(None)
|
| 99 |
+
tm.assert_series_equal(cython_or_numpy, python)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
# TODO: remove this kludge once mypy stops giving false positives here
|
| 103 |
+
# List comprehension has incompatible type List[PandasObject]; expected List[RangeIndex]
|
| 104 |
+
# See GH#29725
|
| 105 |
+
_ldtypes = ["i1", "i2", "i4", "i8", "u1", "u2", "u4", "u8", "f2", "f4", "f8"]
|
| 106 |
+
lefts: list[Index | Series] = [RangeIndex(10, 40, 10)]
|
| 107 |
+
lefts.extend([Series([10, 20, 30], dtype=dtype) for dtype in _ldtypes])
|
| 108 |
+
lefts.extend([Index([10, 20, 30], dtype=dtype) for dtype in _ldtypes if dtype != "f2"])
|
| 109 |
+
|
| 110 |
+
# ------------------------------------------------------------------
|
| 111 |
+
# Comparisons
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class TestNumericComparisons:
|
| 115 |
+
def test_operator_series_comparison_zerorank(self):
|
| 116 |
+
# GH#13006
|
| 117 |
+
result = np.float64(0) > Series([1, 2, 3])
|
| 118 |
+
expected = 0.0 > Series([1, 2, 3])
|
| 119 |
+
tm.assert_series_equal(result, expected)
|
| 120 |
+
result = Series([1, 2, 3]) < np.float64(0)
|
| 121 |
+
expected = Series([1, 2, 3]) < 0.0
|
| 122 |
+
tm.assert_series_equal(result, expected)
|
| 123 |
+
result = np.array([0, 1, 2])[0] > Series([0, 1, 2])
|
| 124 |
+
expected = 0.0 > Series([1, 2, 3])
|
| 125 |
+
tm.assert_series_equal(result, expected)
|
| 126 |
+
|
| 127 |
+
def test_df_numeric_cmp_dt64_raises(self, box_with_array, fixed_now_ts):
|
| 128 |
+
# GH#8932, GH#22163
|
| 129 |
+
ts = fixed_now_ts
|
| 130 |
+
obj = np.array(range(5))
|
| 131 |
+
obj = tm.box_expected(obj, box_with_array)
|
| 132 |
+
|
| 133 |
+
assert_invalid_comparison(obj, ts, box_with_array)
|
| 134 |
+
|
| 135 |
+
def test_compare_invalid(self):
|
| 136 |
+
# GH#8058
|
| 137 |
+
# ops testing
|
| 138 |
+
a = Series(np.random.default_rng(2).standard_normal(5), name=0)
|
| 139 |
+
b = Series(np.random.default_rng(2).standard_normal(5))
|
| 140 |
+
b.name = pd.Timestamp("2000-01-01")
|
| 141 |
+
tm.assert_series_equal(a / b, 1 / (b / a))
|
| 142 |
+
|
| 143 |
+
def test_numeric_cmp_string_numexpr_path(self, box_with_array, monkeypatch):
|
| 144 |
+
# GH#36377, GH#35700
|
| 145 |
+
box = box_with_array
|
| 146 |
+
xbox = box if box is not Index else np.ndarray
|
| 147 |
+
|
| 148 |
+
obj = Series(np.random.default_rng(2).standard_normal(51))
|
| 149 |
+
obj = tm.box_expected(obj, box, transpose=False)
|
| 150 |
+
with monkeypatch.context() as m:
|
| 151 |
+
m.setattr(expr, "_MIN_ELEMENTS", 50)
|
| 152 |
+
result = obj == "a"
|
| 153 |
+
|
| 154 |
+
expected = Series(np.zeros(51, dtype=bool))
|
| 155 |
+
expected = tm.box_expected(expected, xbox, transpose=False)
|
| 156 |
+
tm.assert_equal(result, expected)
|
| 157 |
+
|
| 158 |
+
with monkeypatch.context() as m:
|
| 159 |
+
m.setattr(expr, "_MIN_ELEMENTS", 50)
|
| 160 |
+
result = obj != "a"
|
| 161 |
+
tm.assert_equal(result, ~expected)
|
| 162 |
+
|
| 163 |
+
msg = "Invalid comparison between dtype=float64 and str"
|
| 164 |
+
with pytest.raises(TypeError, match=msg):
|
| 165 |
+
obj < "a"
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
# ------------------------------------------------------------------
|
| 169 |
+
# Numeric dtypes Arithmetic with Datetime/Timedelta Scalar
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class TestNumericArraylikeArithmeticWithDatetimeLike:
|
| 173 |
+
@pytest.mark.parametrize("box_cls", [np.array, Index, Series])
|
| 174 |
+
@pytest.mark.parametrize(
|
| 175 |
+
"left", lefts, ids=lambda x: type(x).__name__ + str(x.dtype)
|
| 176 |
+
)
|
| 177 |
+
def test_mul_td64arr(self, left, box_cls):
|
| 178 |
+
# GH#22390
|
| 179 |
+
right = np.array([1, 2, 3], dtype="m8[s]")
|
| 180 |
+
right = box_cls(right)
|
| 181 |
+
|
| 182 |
+
expected = TimedeltaIndex(["10s", "40s", "90s"], dtype=right.dtype)
|
| 183 |
+
|
| 184 |
+
if isinstance(left, Series) or box_cls is Series:
|
| 185 |
+
expected = Series(expected)
|
| 186 |
+
assert expected.dtype == right.dtype
|
| 187 |
+
|
| 188 |
+
result = left * right
|
| 189 |
+
tm.assert_equal(result, expected)
|
| 190 |
+
|
| 191 |
+
result = right * left
|
| 192 |
+
tm.assert_equal(result, expected)
|
| 193 |
+
|
| 194 |
+
@pytest.mark.parametrize("box_cls", [np.array, Index, Series])
|
| 195 |
+
@pytest.mark.parametrize(
|
| 196 |
+
"left", lefts, ids=lambda x: type(x).__name__ + str(x.dtype)
|
| 197 |
+
)
|
| 198 |
+
def test_div_td64arr(self, left, box_cls):
|
| 199 |
+
# GH#22390
|
| 200 |
+
right = np.array([10, 40, 90], dtype="m8[s]")
|
| 201 |
+
right = box_cls(right)
|
| 202 |
+
|
| 203 |
+
expected = TimedeltaIndex(["1s", "2s", "3s"], dtype=right.dtype)
|
| 204 |
+
if isinstance(left, Series) or box_cls is Series:
|
| 205 |
+
expected = Series(expected)
|
| 206 |
+
assert expected.dtype == right.dtype
|
| 207 |
+
|
| 208 |
+
result = right / left
|
| 209 |
+
tm.assert_equal(result, expected)
|
| 210 |
+
|
| 211 |
+
result = right // left
|
| 212 |
+
tm.assert_equal(result, expected)
|
| 213 |
+
|
| 214 |
+
# (true_) needed for min-versions build 2022-12-26
|
| 215 |
+
msg = "ufunc '(true_)?divide' cannot use operands with types"
|
| 216 |
+
with pytest.raises(TypeError, match=msg):
|
| 217 |
+
left / right
|
| 218 |
+
|
| 219 |
+
msg = "ufunc 'floor_divide' cannot use operands with types"
|
| 220 |
+
with pytest.raises(TypeError, match=msg):
|
| 221 |
+
left // right
|
| 222 |
+
|
| 223 |
+
# TODO: also test Tick objects;
|
| 224 |
+
# see test_numeric_arr_rdiv_tdscalar for note on these failing
|
| 225 |
+
@pytest.mark.parametrize(
|
| 226 |
+
"scalar_td",
|
| 227 |
+
[
|
| 228 |
+
Timedelta(days=1),
|
| 229 |
+
Timedelta(days=1).to_timedelta64(),
|
| 230 |
+
Timedelta(days=1).to_pytimedelta(),
|
| 231 |
+
Timedelta(days=1).to_timedelta64().astype("timedelta64[s]"),
|
| 232 |
+
Timedelta(days=1).to_timedelta64().astype("timedelta64[ms]"),
|
| 233 |
+
],
|
| 234 |
+
ids=lambda x: type(x).__name__,
|
| 235 |
+
)
|
| 236 |
+
def test_numeric_arr_mul_tdscalar(self, scalar_td, numeric_idx, box_with_array):
|
| 237 |
+
# GH#19333
|
| 238 |
+
box = box_with_array
|
| 239 |
+
index = numeric_idx
|
| 240 |
+
expected = TimedeltaIndex([Timedelta(days=n) for n in range(len(index))])
|
| 241 |
+
if isinstance(scalar_td, np.timedelta64):
|
| 242 |
+
dtype = scalar_td.dtype
|
| 243 |
+
expected = expected.astype(dtype)
|
| 244 |
+
elif type(scalar_td) is timedelta:
|
| 245 |
+
expected = expected.astype("m8[us]")
|
| 246 |
+
|
| 247 |
+
index = tm.box_expected(index, box)
|
| 248 |
+
expected = tm.box_expected(expected, box)
|
| 249 |
+
|
| 250 |
+
result = index * scalar_td
|
| 251 |
+
tm.assert_equal(result, expected)
|
| 252 |
+
|
| 253 |
+
commute = scalar_td * index
|
| 254 |
+
tm.assert_equal(commute, expected)
|
| 255 |
+
|
| 256 |
+
@pytest.mark.parametrize(
|
| 257 |
+
"scalar_td",
|
| 258 |
+
[
|
| 259 |
+
Timedelta(days=1),
|
| 260 |
+
Timedelta(days=1).to_timedelta64(),
|
| 261 |
+
Timedelta(days=1).to_pytimedelta(),
|
| 262 |
+
],
|
| 263 |
+
ids=lambda x: type(x).__name__,
|
| 264 |
+
)
|
| 265 |
+
@pytest.mark.parametrize("dtype", [np.int64, np.float64])
|
| 266 |
+
def test_numeric_arr_mul_tdscalar_numexpr_path(
|
| 267 |
+
self, dtype, scalar_td, box_with_array
|
| 268 |
+
):
|
| 269 |
+
# GH#44772 for the float64 case
|
| 270 |
+
box = box_with_array
|
| 271 |
+
|
| 272 |
+
arr_i8 = np.arange(2 * 10**4).astype(np.int64, copy=False)
|
| 273 |
+
arr = arr_i8.astype(dtype, copy=False)
|
| 274 |
+
obj = tm.box_expected(arr, box, transpose=False)
|
| 275 |
+
|
| 276 |
+
expected = arr_i8.view("timedelta64[D]").astype("timedelta64[ns]")
|
| 277 |
+
if type(scalar_td) is timedelta:
|
| 278 |
+
expected = expected.astype("timedelta64[us]")
|
| 279 |
+
|
| 280 |
+
expected = tm.box_expected(expected, box, transpose=False)
|
| 281 |
+
|
| 282 |
+
result = obj * scalar_td
|
| 283 |
+
tm.assert_equal(result, expected)
|
| 284 |
+
|
| 285 |
+
result = scalar_td * obj
|
| 286 |
+
tm.assert_equal(result, expected)
|
| 287 |
+
|
| 288 |
+
def test_numeric_arr_rdiv_tdscalar(self, three_days, numeric_idx, box_with_array):
|
| 289 |
+
box = box_with_array
|
| 290 |
+
|
| 291 |
+
index = numeric_idx[1:3]
|
| 292 |
+
|
| 293 |
+
expected = TimedeltaIndex(["3 Days", "36 Hours"])
|
| 294 |
+
if isinstance(three_days, np.timedelta64):
|
| 295 |
+
dtype = three_days.dtype
|
| 296 |
+
if dtype < np.dtype("m8[s]"):
|
| 297 |
+
# i.e. resolution is lower -> use lowest supported resolution
|
| 298 |
+
dtype = np.dtype("m8[s]")
|
| 299 |
+
expected = expected.astype(dtype)
|
| 300 |
+
elif type(three_days) is timedelta:
|
| 301 |
+
expected = expected.astype("m8[us]")
|
| 302 |
+
elif isinstance(
|
| 303 |
+
three_days,
|
| 304 |
+
(pd.offsets.Day, pd.offsets.Hour, pd.offsets.Minute, pd.offsets.Second),
|
| 305 |
+
):
|
| 306 |
+
# closest reso is Second
|
| 307 |
+
expected = expected.astype("m8[s]")
|
| 308 |
+
|
| 309 |
+
index = tm.box_expected(index, box)
|
| 310 |
+
expected = tm.box_expected(expected, box)
|
| 311 |
+
|
| 312 |
+
result = three_days / index
|
| 313 |
+
tm.assert_equal(result, expected)
|
| 314 |
+
|
| 315 |
+
msg = "cannot use operands with types dtype"
|
| 316 |
+
with pytest.raises(TypeError, match=msg):
|
| 317 |
+
index / three_days
|
| 318 |
+
|
| 319 |
+
@pytest.mark.parametrize(
|
| 320 |
+
"other",
|
| 321 |
+
[
|
| 322 |
+
Timedelta(hours=31),
|
| 323 |
+
Timedelta(hours=31).to_pytimedelta(),
|
| 324 |
+
Timedelta(hours=31).to_timedelta64(),
|
| 325 |
+
Timedelta(hours=31).to_timedelta64().astype("m8[h]"),
|
| 326 |
+
np.timedelta64("NaT"),
|
| 327 |
+
np.timedelta64("NaT", "D"),
|
| 328 |
+
pd.offsets.Minute(3),
|
| 329 |
+
pd.offsets.Second(0),
|
| 330 |
+
# GH#28080 numeric+datetimelike should raise; Timestamp used
|
| 331 |
+
# to raise NullFrequencyError but that behavior was removed in 1.0
|
| 332 |
+
pd.Timestamp("2021-01-01", tz="Asia/Tokyo"),
|
| 333 |
+
pd.Timestamp("2021-01-01"),
|
| 334 |
+
pd.Timestamp("2021-01-01").to_pydatetime(),
|
| 335 |
+
pd.Timestamp("2021-01-01", tz="UTC").to_pydatetime(),
|
| 336 |
+
pd.Timestamp("2021-01-01").to_datetime64(),
|
| 337 |
+
np.datetime64("NaT", "ns"),
|
| 338 |
+
pd.NaT,
|
| 339 |
+
],
|
| 340 |
+
ids=repr,
|
| 341 |
+
)
|
| 342 |
+
def test_add_sub_datetimedeltalike_invalid(
|
| 343 |
+
self, numeric_idx, other, box_with_array
|
| 344 |
+
):
|
| 345 |
+
box = box_with_array
|
| 346 |
+
|
| 347 |
+
left = tm.box_expected(numeric_idx, box)
|
| 348 |
+
msg = "|".join(
|
| 349 |
+
[
|
| 350 |
+
"unsupported operand type",
|
| 351 |
+
"Addition/subtraction of integers and integer-arrays",
|
| 352 |
+
"Instead of adding/subtracting",
|
| 353 |
+
"cannot use operands with types dtype",
|
| 354 |
+
"Concatenation operation is not implemented for NumPy arrays",
|
| 355 |
+
"Cannot (add|subtract) NaT (to|from) ndarray",
|
| 356 |
+
# pd.array vs np.datetime64 case
|
| 357 |
+
r"operand type\(s\) all returned NotImplemented from __array_ufunc__",
|
| 358 |
+
"can only perform ops with numeric values",
|
| 359 |
+
"cannot subtract DatetimeArray from ndarray",
|
| 360 |
+
# pd.Timedelta(1) + Index([0, 1, 2])
|
| 361 |
+
"Cannot add or subtract Timedelta from integers",
|
| 362 |
+
]
|
| 363 |
+
)
|
| 364 |
+
assert_invalid_addsub_type(left, other, msg)
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
# ------------------------------------------------------------------
|
| 368 |
+
# Arithmetic
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
class TestDivisionByZero:
|
| 372 |
+
def test_div_zero(self, zero, numeric_idx):
|
| 373 |
+
idx = numeric_idx
|
| 374 |
+
|
| 375 |
+
expected = Index([np.nan, np.inf, np.inf, np.inf, np.inf], dtype=np.float64)
|
| 376 |
+
# We only adjust for Index, because Series does not yet apply
|
| 377 |
+
# the adjustment correctly.
|
| 378 |
+
expected2 = adjust_negative_zero(zero, expected)
|
| 379 |
+
|
| 380 |
+
result = idx / zero
|
| 381 |
+
tm.assert_index_equal(result, expected2)
|
| 382 |
+
ser_compat = Series(idx).astype("i8") / np.array(zero).astype("i8")
|
| 383 |
+
tm.assert_series_equal(ser_compat, Series(expected))
|
| 384 |
+
|
| 385 |
+
def test_floordiv_zero(self, zero, numeric_idx):
|
| 386 |
+
idx = numeric_idx
|
| 387 |
+
|
| 388 |
+
expected = Index([np.nan, np.inf, np.inf, np.inf, np.inf], dtype=np.float64)
|
| 389 |
+
# We only adjust for Index, because Series does not yet apply
|
| 390 |
+
# the adjustment correctly.
|
| 391 |
+
expected2 = adjust_negative_zero(zero, expected)
|
| 392 |
+
|
| 393 |
+
result = idx // zero
|
| 394 |
+
tm.assert_index_equal(result, expected2)
|
| 395 |
+
ser_compat = Series(idx).astype("i8") // np.array(zero).astype("i8")
|
| 396 |
+
tm.assert_series_equal(ser_compat, Series(expected))
|
| 397 |
+
|
| 398 |
+
def test_mod_zero(self, zero, numeric_idx):
|
| 399 |
+
idx = numeric_idx
|
| 400 |
+
|
| 401 |
+
expected = Index([np.nan, np.nan, np.nan, np.nan, np.nan], dtype=np.float64)
|
| 402 |
+
result = idx % zero
|
| 403 |
+
tm.assert_index_equal(result, expected)
|
| 404 |
+
ser_compat = Series(idx).astype("i8") % np.array(zero).astype("i8")
|
| 405 |
+
tm.assert_series_equal(ser_compat, Series(result))
|
| 406 |
+
|
| 407 |
+
def test_divmod_zero(self, zero, numeric_idx):
|
| 408 |
+
idx = numeric_idx
|
| 409 |
+
|
| 410 |
+
exleft = Index([np.nan, np.inf, np.inf, np.inf, np.inf], dtype=np.float64)
|
| 411 |
+
exright = Index([np.nan, np.nan, np.nan, np.nan, np.nan], dtype=np.float64)
|
| 412 |
+
exleft = adjust_negative_zero(zero, exleft)
|
| 413 |
+
|
| 414 |
+
result = divmod(idx, zero)
|
| 415 |
+
tm.assert_index_equal(result[0], exleft)
|
| 416 |
+
tm.assert_index_equal(result[1], exright)
|
| 417 |
+
|
| 418 |
+
@pytest.mark.parametrize("op", [operator.truediv, operator.floordiv])
|
| 419 |
+
def test_div_negative_zero(self, zero, numeric_idx, op):
|
| 420 |
+
# Check that -1 / -0.0 returns np.inf, not -np.inf
|
| 421 |
+
if numeric_idx.dtype == np.uint64:
|
| 422 |
+
pytest.skip(f"Div by negative 0 not relevant for {numeric_idx.dtype}")
|
| 423 |
+
idx = numeric_idx - 3
|
| 424 |
+
|
| 425 |
+
expected = Index([-np.inf, -np.inf, -np.inf, np.nan, np.inf], dtype=np.float64)
|
| 426 |
+
expected = adjust_negative_zero(zero, expected)
|
| 427 |
+
|
| 428 |
+
result = op(idx, zero)
|
| 429 |
+
tm.assert_index_equal(result, expected)
|
| 430 |
+
|
| 431 |
+
# ------------------------------------------------------------------
|
| 432 |
+
|
| 433 |
+
@pytest.mark.parametrize("dtype1", [np.int64, np.float64, np.uint64])
|
| 434 |
+
def test_ser_div_ser(
|
| 435 |
+
self,
|
| 436 |
+
switch_numexpr_min_elements,
|
| 437 |
+
dtype1,
|
| 438 |
+
any_real_numpy_dtype,
|
| 439 |
+
):
|
| 440 |
+
# no longer do integer div for any ops, but deal with the 0's
|
| 441 |
+
dtype2 = any_real_numpy_dtype
|
| 442 |
+
|
| 443 |
+
first = Series([3, 4, 5, 8], name="first").astype(dtype1)
|
| 444 |
+
second = Series([0, 0, 0, 3], name="second").astype(dtype2)
|
| 445 |
+
|
| 446 |
+
with np.errstate(all="ignore"):
|
| 447 |
+
expected = Series(
|
| 448 |
+
first.values.astype(np.float64) / second.values,
|
| 449 |
+
dtype="float64",
|
| 450 |
+
name=None,
|
| 451 |
+
)
|
| 452 |
+
expected.iloc[0:3] = np.inf
|
| 453 |
+
if first.dtype == "int64" and second.dtype == "float32":
|
| 454 |
+
# when using numexpr, the casting rules are slightly different
|
| 455 |
+
# and int64/float32 combo results in float32 instead of float64
|
| 456 |
+
if expr.USE_NUMEXPR and switch_numexpr_min_elements == 0:
|
| 457 |
+
expected = expected.astype("float32")
|
| 458 |
+
|
| 459 |
+
result = first / second
|
| 460 |
+
tm.assert_series_equal(result, expected)
|
| 461 |
+
assert not result.equals(second / first)
|
| 462 |
+
|
| 463 |
+
@pytest.mark.parametrize("dtype1", [np.int64, np.float64, np.uint64])
|
| 464 |
+
def test_ser_divmod_zero(self, dtype1, any_real_numpy_dtype):
|
| 465 |
+
# GH#26987
|
| 466 |
+
dtype2 = any_real_numpy_dtype
|
| 467 |
+
left = Series([1, 1]).astype(dtype1)
|
| 468 |
+
right = Series([0, 2]).astype(dtype2)
|
| 469 |
+
|
| 470 |
+
# GH#27321 pandas convention is to set 1 // 0 to np.inf, as opposed
|
| 471 |
+
# to numpy which sets to np.nan; patch `expected[0]` below
|
| 472 |
+
expected = left // right, left % right
|
| 473 |
+
expected = list(expected)
|
| 474 |
+
expected[0] = expected[0].astype(np.float64)
|
| 475 |
+
expected[0][0] = np.inf
|
| 476 |
+
result = divmod(left, right)
|
| 477 |
+
|
| 478 |
+
tm.assert_series_equal(result[0], expected[0])
|
| 479 |
+
tm.assert_series_equal(result[1], expected[1])
|
| 480 |
+
|
| 481 |
+
# rdivmod case
|
| 482 |
+
result = divmod(left.values, right)
|
| 483 |
+
tm.assert_series_equal(result[0], expected[0])
|
| 484 |
+
tm.assert_series_equal(result[1], expected[1])
|
| 485 |
+
|
| 486 |
+
def test_ser_divmod_inf(self):
|
| 487 |
+
left = Series([np.inf, 1.0])
|
| 488 |
+
right = Series([np.inf, 2.0])
|
| 489 |
+
|
| 490 |
+
expected = left // right, left % right
|
| 491 |
+
result = divmod(left, right)
|
| 492 |
+
|
| 493 |
+
tm.assert_series_equal(result[0], expected[0])
|
| 494 |
+
tm.assert_series_equal(result[1], expected[1])
|
| 495 |
+
|
| 496 |
+
# rdivmod case
|
| 497 |
+
result = divmod(left.values, right)
|
| 498 |
+
tm.assert_series_equal(result[0], expected[0])
|
| 499 |
+
tm.assert_series_equal(result[1], expected[1])
|
| 500 |
+
|
| 501 |
+
def test_rdiv_zero_compat(self):
|
| 502 |
+
# GH#8674
|
| 503 |
+
zero_array = np.array([0] * 5)
|
| 504 |
+
data = np.random.default_rng(2).standard_normal(5)
|
| 505 |
+
expected = Series([0.0] * 5)
|
| 506 |
+
|
| 507 |
+
result = zero_array / Series(data)
|
| 508 |
+
tm.assert_series_equal(result, expected)
|
| 509 |
+
|
| 510 |
+
result = Series(zero_array) / data
|
| 511 |
+
tm.assert_series_equal(result, expected)
|
| 512 |
+
|
| 513 |
+
result = Series(zero_array) / Series(data)
|
| 514 |
+
tm.assert_series_equal(result, expected)
|
| 515 |
+
|
| 516 |
+
def test_div_zero_inf_signs(self):
|
| 517 |
+
# GH#9144, inf signing
|
| 518 |
+
ser = Series([-1, 0, 1], name="first")
|
| 519 |
+
expected = Series([-np.inf, np.nan, np.inf], name="first")
|
| 520 |
+
|
| 521 |
+
result = ser / 0
|
| 522 |
+
tm.assert_series_equal(result, expected)
|
| 523 |
+
|
| 524 |
+
def test_rdiv_zero(self):
|
| 525 |
+
# GH#9144
|
| 526 |
+
ser = Series([-1, 0, 1], name="first")
|
| 527 |
+
expected = Series([0.0, np.nan, 0.0], name="first")
|
| 528 |
+
|
| 529 |
+
result = 0 / ser
|
| 530 |
+
tm.assert_series_equal(result, expected)
|
| 531 |
+
|
| 532 |
+
def test_floordiv_div(self):
|
| 533 |
+
# GH#9144
|
| 534 |
+
ser = Series([-1, 0, 1], name="first")
|
| 535 |
+
|
| 536 |
+
result = ser // 0
|
| 537 |
+
expected = Series([-np.inf, np.nan, np.inf], name="first")
|
| 538 |
+
tm.assert_series_equal(result, expected)
|
| 539 |
+
|
| 540 |
+
def test_df_div_zero_df(self):
|
| 541 |
+
# integer div, but deal with the 0's (GH#9144)
|
| 542 |
+
df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
|
| 543 |
+
result = df / df
|
| 544 |
+
|
| 545 |
+
first = Series([1.0, 1.0, 1.0, 1.0])
|
| 546 |
+
second = Series([np.nan, np.nan, np.nan, 1])
|
| 547 |
+
expected = pd.DataFrame({"first": first, "second": second})
|
| 548 |
+
tm.assert_frame_equal(result, expected)
|
| 549 |
+
|
| 550 |
+
def test_df_div_zero_array(self):
|
| 551 |
+
# integer div, but deal with the 0's (GH#9144)
|
| 552 |
+
df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
|
| 553 |
+
|
| 554 |
+
first = Series([1.0, 1.0, 1.0, 1.0])
|
| 555 |
+
second = Series([np.nan, np.nan, np.nan, 1])
|
| 556 |
+
expected = pd.DataFrame({"first": first, "second": second})
|
| 557 |
+
|
| 558 |
+
with np.errstate(all="ignore"):
|
| 559 |
+
arr = df.values.astype("float") / df.values
|
| 560 |
+
result = pd.DataFrame(arr, index=df.index, columns=df.columns)
|
| 561 |
+
tm.assert_frame_equal(result, expected)
|
| 562 |
+
|
| 563 |
+
def test_df_div_zero_int(self):
|
| 564 |
+
# integer div, but deal with the 0's (GH#9144)
|
| 565 |
+
df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
|
| 566 |
+
|
| 567 |
+
result = df / 0
|
| 568 |
+
expected = pd.DataFrame(np.inf, index=df.index, columns=df.columns)
|
| 569 |
+
expected.iloc[0:3, 1] = np.nan
|
| 570 |
+
tm.assert_frame_equal(result, expected)
|
| 571 |
+
|
| 572 |
+
# numpy has a slightly different (wrong) treatment
|
| 573 |
+
with np.errstate(all="ignore"):
|
| 574 |
+
arr = df.values.astype("float64") / 0
|
| 575 |
+
result2 = pd.DataFrame(arr, index=df.index, columns=df.columns)
|
| 576 |
+
tm.assert_frame_equal(result2, expected)
|
| 577 |
+
|
| 578 |
+
def test_df_div_zero_series_does_not_commute(self):
|
| 579 |
+
# integer div, but deal with the 0's (GH#9144)
|
| 580 |
+
df = pd.DataFrame(np.random.default_rng(2).standard_normal((10, 5)))
|
| 581 |
+
ser = df[0]
|
| 582 |
+
res = ser / df
|
| 583 |
+
res2 = df / ser
|
| 584 |
+
assert not res.fillna(0).equals(res2.fillna(0))
|
| 585 |
+
|
| 586 |
+
# ------------------------------------------------------------------
|
| 587 |
+
# Mod By Zero
|
| 588 |
+
|
| 589 |
+
def test_df_mod_zero_df(self, using_array_manager):
|
| 590 |
+
# GH#3590, modulo as ints
|
| 591 |
+
df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
|
| 592 |
+
# this is technically wrong, as the integer portion is coerced to float
|
| 593 |
+
first = Series([0, 0, 0, 0])
|
| 594 |
+
if not using_array_manager:
|
| 595 |
+
# INFO(ArrayManager) BlockManager doesn't preserve dtype per column
|
| 596 |
+
# while ArrayManager performs op column-wisedoes and thus preserves
|
| 597 |
+
# dtype if possible
|
| 598 |
+
first = first.astype("float64")
|
| 599 |
+
second = Series([np.nan, np.nan, np.nan, 0])
|
| 600 |
+
expected = pd.DataFrame({"first": first, "second": second})
|
| 601 |
+
result = df % df
|
| 602 |
+
tm.assert_frame_equal(result, expected)
|
| 603 |
+
|
| 604 |
+
# GH#38939 If we dont pass copy=False, df is consolidated and
|
| 605 |
+
# result["first"] is float64 instead of int64
|
| 606 |
+
df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]}, copy=False)
|
| 607 |
+
first = Series([0, 0, 0, 0], dtype="int64")
|
| 608 |
+
second = Series([np.nan, np.nan, np.nan, 0])
|
| 609 |
+
expected = pd.DataFrame({"first": first, "second": second})
|
| 610 |
+
result = df % df
|
| 611 |
+
tm.assert_frame_equal(result, expected)
|
| 612 |
+
|
| 613 |
+
def test_df_mod_zero_array(self):
|
| 614 |
+
# GH#3590, modulo as ints
|
| 615 |
+
df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
|
| 616 |
+
|
| 617 |
+
# this is technically wrong, as the integer portion is coerced to float
|
| 618 |
+
# ###
|
| 619 |
+
first = Series([0, 0, 0, 0], dtype="float64")
|
| 620 |
+
second = Series([np.nan, np.nan, np.nan, 0])
|
| 621 |
+
expected = pd.DataFrame({"first": first, "second": second})
|
| 622 |
+
|
| 623 |
+
# numpy has a slightly different (wrong) treatment
|
| 624 |
+
with np.errstate(all="ignore"):
|
| 625 |
+
arr = df.values % df.values
|
| 626 |
+
result2 = pd.DataFrame(arr, index=df.index, columns=df.columns, dtype="float64")
|
| 627 |
+
result2.iloc[0:3, 1] = np.nan
|
| 628 |
+
tm.assert_frame_equal(result2, expected)
|
| 629 |
+
|
| 630 |
+
def test_df_mod_zero_int(self):
|
| 631 |
+
# GH#3590, modulo as ints
|
| 632 |
+
df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
|
| 633 |
+
|
| 634 |
+
result = df % 0
|
| 635 |
+
expected = pd.DataFrame(np.nan, index=df.index, columns=df.columns)
|
| 636 |
+
tm.assert_frame_equal(result, expected)
|
| 637 |
+
|
| 638 |
+
# numpy has a slightly different (wrong) treatment
|
| 639 |
+
with np.errstate(all="ignore"):
|
| 640 |
+
arr = df.values.astype("float64") % 0
|
| 641 |
+
result2 = pd.DataFrame(arr, index=df.index, columns=df.columns)
|
| 642 |
+
tm.assert_frame_equal(result2, expected)
|
| 643 |
+
|
| 644 |
+
def test_df_mod_zero_series_does_not_commute(self):
|
| 645 |
+
# GH#3590, modulo as ints
|
| 646 |
+
# not commutative with series
|
| 647 |
+
df = pd.DataFrame(np.random.default_rng(2).standard_normal((10, 5)))
|
| 648 |
+
ser = df[0]
|
| 649 |
+
res = ser % df
|
| 650 |
+
res2 = df % ser
|
| 651 |
+
assert not res.fillna(0).equals(res2.fillna(0))
|
| 652 |
+
|
| 653 |
+
|
| 654 |
+
class TestMultiplicationDivision:
|
| 655 |
+
# __mul__, __rmul__, __div__, __rdiv__, __floordiv__, __rfloordiv__
|
| 656 |
+
# for non-timestamp/timedelta/period dtypes
|
| 657 |
+
|
| 658 |
+
def test_divide_decimal(self, box_with_array):
|
| 659 |
+
# resolves issue GH#9787
|
| 660 |
+
box = box_with_array
|
| 661 |
+
ser = Series([Decimal(10)])
|
| 662 |
+
expected = Series([Decimal(5)])
|
| 663 |
+
|
| 664 |
+
ser = tm.box_expected(ser, box)
|
| 665 |
+
expected = tm.box_expected(expected, box)
|
| 666 |
+
|
| 667 |
+
result = ser / Decimal(2)
|
| 668 |
+
|
| 669 |
+
tm.assert_equal(result, expected)
|
| 670 |
+
|
| 671 |
+
result = ser // Decimal(2)
|
| 672 |
+
tm.assert_equal(result, expected)
|
| 673 |
+
|
| 674 |
+
def test_div_equiv_binop(self):
|
| 675 |
+
# Test Series.div as well as Series.__div__
|
| 676 |
+
# float/integer issue
|
| 677 |
+
# GH#7785
|
| 678 |
+
first = Series([1, 0], name="first")
|
| 679 |
+
second = Series([-0.01, -0.02], name="second")
|
| 680 |
+
expected = Series([-0.01, -np.inf])
|
| 681 |
+
|
| 682 |
+
result = second.div(first)
|
| 683 |
+
tm.assert_series_equal(result, expected, check_names=False)
|
| 684 |
+
|
| 685 |
+
result = second / first
|
| 686 |
+
tm.assert_series_equal(result, expected)
|
| 687 |
+
|
| 688 |
+
def test_div_int(self, numeric_idx):
|
| 689 |
+
idx = numeric_idx
|
| 690 |
+
result = idx / 1
|
| 691 |
+
expected = idx.astype("float64")
|
| 692 |
+
tm.assert_index_equal(result, expected)
|
| 693 |
+
|
| 694 |
+
result = idx / 2
|
| 695 |
+
expected = Index(idx.values / 2)
|
| 696 |
+
tm.assert_index_equal(result, expected)
|
| 697 |
+
|
| 698 |
+
@pytest.mark.parametrize("op", [operator.mul, ops.rmul, operator.floordiv])
|
| 699 |
+
def test_mul_int_identity(self, op, numeric_idx, box_with_array):
|
| 700 |
+
idx = numeric_idx
|
| 701 |
+
idx = tm.box_expected(idx, box_with_array)
|
| 702 |
+
|
| 703 |
+
result = op(idx, 1)
|
| 704 |
+
tm.assert_equal(result, idx)
|
| 705 |
+
|
| 706 |
+
def test_mul_int_array(self, numeric_idx):
|
| 707 |
+
idx = numeric_idx
|
| 708 |
+
didx = idx * idx
|
| 709 |
+
|
| 710 |
+
result = idx * np.array(5, dtype="int64")
|
| 711 |
+
tm.assert_index_equal(result, idx * 5)
|
| 712 |
+
|
| 713 |
+
arr_dtype = "uint64" if idx.dtype == np.uint64 else "int64"
|
| 714 |
+
result = idx * np.arange(5, dtype=arr_dtype)
|
| 715 |
+
tm.assert_index_equal(result, didx)
|
| 716 |
+
|
| 717 |
+
def test_mul_int_series(self, numeric_idx):
|
| 718 |
+
idx = numeric_idx
|
| 719 |
+
didx = idx * idx
|
| 720 |
+
|
| 721 |
+
arr_dtype = "uint64" if idx.dtype == np.uint64 else "int64"
|
| 722 |
+
result = idx * Series(np.arange(5, dtype=arr_dtype))
|
| 723 |
+
tm.assert_series_equal(result, Series(didx))
|
| 724 |
+
|
| 725 |
+
def test_mul_float_series(self, numeric_idx):
|
| 726 |
+
idx = numeric_idx
|
| 727 |
+
rng5 = np.arange(5, dtype="float64")
|
| 728 |
+
|
| 729 |
+
result = idx * Series(rng5 + 0.1)
|
| 730 |
+
expected = Series(rng5 * (rng5 + 0.1))
|
| 731 |
+
tm.assert_series_equal(result, expected)
|
| 732 |
+
|
| 733 |
+
def test_mul_index(self, numeric_idx):
|
| 734 |
+
idx = numeric_idx
|
| 735 |
+
|
| 736 |
+
result = idx * idx
|
| 737 |
+
tm.assert_index_equal(result, idx**2)
|
| 738 |
+
|
| 739 |
+
def test_mul_datelike_raises(self, numeric_idx):
|
| 740 |
+
idx = numeric_idx
|
| 741 |
+
msg = "cannot perform __rmul__ with this index type"
|
| 742 |
+
with pytest.raises(TypeError, match=msg):
|
| 743 |
+
idx * date_range("20130101", periods=5)
|
| 744 |
+
|
| 745 |
+
def test_mul_size_mismatch_raises(self, numeric_idx):
|
| 746 |
+
idx = numeric_idx
|
| 747 |
+
msg = "operands could not be broadcast together"
|
| 748 |
+
with pytest.raises(ValueError, match=msg):
|
| 749 |
+
idx * idx[0:3]
|
| 750 |
+
with pytest.raises(ValueError, match=msg):
|
| 751 |
+
idx * np.array([1, 2])
|
| 752 |
+
|
| 753 |
+
@pytest.mark.parametrize("op", [operator.pow, ops.rpow])
|
| 754 |
+
def test_pow_float(self, op, numeric_idx, box_with_array):
|
| 755 |
+
# test power calculations both ways, GH#14973
|
| 756 |
+
box = box_with_array
|
| 757 |
+
idx = numeric_idx
|
| 758 |
+
expected = Index(op(idx.values, 2.0))
|
| 759 |
+
|
| 760 |
+
idx = tm.box_expected(idx, box)
|
| 761 |
+
expected = tm.box_expected(expected, box)
|
| 762 |
+
|
| 763 |
+
result = op(idx, 2.0)
|
| 764 |
+
tm.assert_equal(result, expected)
|
| 765 |
+
|
| 766 |
+
def test_modulo(self, numeric_idx, box_with_array):
|
| 767 |
+
# GH#9244
|
| 768 |
+
box = box_with_array
|
| 769 |
+
idx = numeric_idx
|
| 770 |
+
expected = Index(idx.values % 2)
|
| 771 |
+
|
| 772 |
+
idx = tm.box_expected(idx, box)
|
| 773 |
+
expected = tm.box_expected(expected, box)
|
| 774 |
+
|
| 775 |
+
result = idx % 2
|
| 776 |
+
tm.assert_equal(result, expected)
|
| 777 |
+
|
| 778 |
+
def test_divmod_scalar(self, numeric_idx):
|
| 779 |
+
idx = numeric_idx
|
| 780 |
+
|
| 781 |
+
result = divmod(idx, 2)
|
| 782 |
+
with np.errstate(all="ignore"):
|
| 783 |
+
div, mod = divmod(idx.values, 2)
|
| 784 |
+
|
| 785 |
+
expected = Index(div), Index(mod)
|
| 786 |
+
for r, e in zip(result, expected):
|
| 787 |
+
tm.assert_index_equal(r, e)
|
| 788 |
+
|
| 789 |
+
def test_divmod_ndarray(self, numeric_idx):
|
| 790 |
+
idx = numeric_idx
|
| 791 |
+
other = np.ones(idx.values.shape, dtype=idx.values.dtype) * 2
|
| 792 |
+
|
| 793 |
+
result = divmod(idx, other)
|
| 794 |
+
with np.errstate(all="ignore"):
|
| 795 |
+
div, mod = divmod(idx.values, other)
|
| 796 |
+
|
| 797 |
+
expected = Index(div), Index(mod)
|
| 798 |
+
for r, e in zip(result, expected):
|
| 799 |
+
tm.assert_index_equal(r, e)
|
| 800 |
+
|
| 801 |
+
def test_divmod_series(self, numeric_idx):
|
| 802 |
+
idx = numeric_idx
|
| 803 |
+
other = np.ones(idx.values.shape, dtype=idx.values.dtype) * 2
|
| 804 |
+
|
| 805 |
+
result = divmod(idx, Series(other))
|
| 806 |
+
with np.errstate(all="ignore"):
|
| 807 |
+
div, mod = divmod(idx.values, other)
|
| 808 |
+
|
| 809 |
+
expected = Series(div), Series(mod)
|
| 810 |
+
for r, e in zip(result, expected):
|
| 811 |
+
tm.assert_series_equal(r, e)
|
| 812 |
+
|
| 813 |
+
@pytest.mark.parametrize("other", [np.nan, 7, -23, 2.718, -3.14, np.inf])
|
| 814 |
+
def test_ops_np_scalar(self, other):
|
| 815 |
+
vals = np.random.default_rng(2).standard_normal((5, 3))
|
| 816 |
+
f = lambda x: pd.DataFrame(
|
| 817 |
+
x, index=list("ABCDE"), columns=["jim", "joe", "jolie"]
|
| 818 |
+
)
|
| 819 |
+
|
| 820 |
+
df = f(vals)
|
| 821 |
+
|
| 822 |
+
tm.assert_frame_equal(df / np.array(other), f(vals / other))
|
| 823 |
+
tm.assert_frame_equal(np.array(other) * df, f(vals * other))
|
| 824 |
+
tm.assert_frame_equal(df + np.array(other), f(vals + other))
|
| 825 |
+
tm.assert_frame_equal(np.array(other) - df, f(other - vals))
|
| 826 |
+
|
| 827 |
+
# TODO: This came from series.test.test_operators, needs cleanup
|
| 828 |
+
def test_operators_frame(self):
|
| 829 |
+
# rpow does not work with DataFrame
|
| 830 |
+
ts = Series(
|
| 831 |
+
np.arange(10, dtype=np.float64),
|
| 832 |
+
index=date_range("2020-01-01", periods=10),
|
| 833 |
+
name="ts",
|
| 834 |
+
)
|
| 835 |
+
ts.name = "ts"
|
| 836 |
+
|
| 837 |
+
df = pd.DataFrame({"A": ts})
|
| 838 |
+
|
| 839 |
+
tm.assert_series_equal(ts + ts, ts + df["A"], check_names=False)
|
| 840 |
+
tm.assert_series_equal(ts**ts, ts ** df["A"], check_names=False)
|
| 841 |
+
tm.assert_series_equal(ts < ts, ts < df["A"], check_names=False)
|
| 842 |
+
tm.assert_series_equal(ts / ts, ts / df["A"], check_names=False)
|
| 843 |
+
|
| 844 |
+
# TODO: this came from tests.series.test_analytics, needs cleanup and
|
| 845 |
+
# de-duplication with test_modulo above
|
| 846 |
+
def test_modulo2(self):
|
| 847 |
+
with np.errstate(all="ignore"):
|
| 848 |
+
# GH#3590, modulo as ints
|
| 849 |
+
p = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
|
| 850 |
+
result = p["first"] % p["second"]
|
| 851 |
+
expected = Series(p["first"].values % p["second"].values, dtype="float64")
|
| 852 |
+
expected.iloc[0:3] = np.nan
|
| 853 |
+
tm.assert_series_equal(result, expected)
|
| 854 |
+
|
| 855 |
+
result = p["first"] % 0
|
| 856 |
+
expected = Series(np.nan, index=p.index, name="first")
|
| 857 |
+
tm.assert_series_equal(result, expected)
|
| 858 |
+
|
| 859 |
+
p = p.astype("float64")
|
| 860 |
+
result = p["first"] % p["second"]
|
| 861 |
+
expected = Series(p["first"].values % p["second"].values)
|
| 862 |
+
tm.assert_series_equal(result, expected)
|
| 863 |
+
|
| 864 |
+
p = p.astype("float64")
|
| 865 |
+
result = p["first"] % p["second"]
|
| 866 |
+
result2 = p["second"] % p["first"]
|
| 867 |
+
assert not result.equals(result2)
|
| 868 |
+
|
| 869 |
+
def test_modulo_zero_int(self):
|
| 870 |
+
# GH#9144
|
| 871 |
+
with np.errstate(all="ignore"):
|
| 872 |
+
s = Series([0, 1])
|
| 873 |
+
|
| 874 |
+
result = s % 0
|
| 875 |
+
expected = Series([np.nan, np.nan])
|
| 876 |
+
tm.assert_series_equal(result, expected)
|
| 877 |
+
|
| 878 |
+
result = 0 % s
|
| 879 |
+
expected = Series([np.nan, 0.0])
|
| 880 |
+
tm.assert_series_equal(result, expected)
|
| 881 |
+
|
| 882 |
+
|
| 883 |
+
class TestAdditionSubtraction:
|
| 884 |
+
# __add__, __sub__, __radd__, __rsub__, __iadd__, __isub__
|
| 885 |
+
# for non-timestamp/timedelta/period dtypes
|
| 886 |
+
|
| 887 |
+
@pytest.mark.parametrize(
|
| 888 |
+
"first, second, expected",
|
| 889 |
+
[
|
| 890 |
+
(
|
| 891 |
+
Series([1, 2, 3], index=list("ABC"), name="x"),
|
| 892 |
+
Series([2, 2, 2], index=list("ABD"), name="x"),
|
| 893 |
+
Series([3.0, 4.0, np.nan, np.nan], index=list("ABCD"), name="x"),
|
| 894 |
+
),
|
| 895 |
+
(
|
| 896 |
+
Series([1, 2, 3], index=list("ABC"), name="x"),
|
| 897 |
+
Series([2, 2, 2, 2], index=list("ABCD"), name="x"),
|
| 898 |
+
Series([3, 4, 5, np.nan], index=list("ABCD"), name="x"),
|
| 899 |
+
),
|
| 900 |
+
],
|
| 901 |
+
)
|
| 902 |
+
def test_add_series(self, first, second, expected):
|
| 903 |
+
# GH#1134
|
| 904 |
+
tm.assert_series_equal(first + second, expected)
|
| 905 |
+
tm.assert_series_equal(second + first, expected)
|
| 906 |
+
|
| 907 |
+
@pytest.mark.parametrize(
|
| 908 |
+
"first, second, expected",
|
| 909 |
+
[
|
| 910 |
+
(
|
| 911 |
+
pd.DataFrame({"x": [1, 2, 3]}, index=list("ABC")),
|
| 912 |
+
pd.DataFrame({"x": [2, 2, 2]}, index=list("ABD")),
|
| 913 |
+
pd.DataFrame({"x": [3.0, 4.0, np.nan, np.nan]}, index=list("ABCD")),
|
| 914 |
+
),
|
| 915 |
+
(
|
| 916 |
+
pd.DataFrame({"x": [1, 2, 3]}, index=list("ABC")),
|
| 917 |
+
pd.DataFrame({"x": [2, 2, 2, 2]}, index=list("ABCD")),
|
| 918 |
+
pd.DataFrame({"x": [3, 4, 5, np.nan]}, index=list("ABCD")),
|
| 919 |
+
),
|
| 920 |
+
],
|
| 921 |
+
)
|
| 922 |
+
def test_add_frames(self, first, second, expected):
|
| 923 |
+
# GH#1134
|
| 924 |
+
tm.assert_frame_equal(first + second, expected)
|
| 925 |
+
tm.assert_frame_equal(second + first, expected)
|
| 926 |
+
|
| 927 |
+
# TODO: This came from series.test.test_operators, needs cleanup
|
| 928 |
+
def test_series_frame_radd_bug(self, fixed_now_ts):
|
| 929 |
+
# GH#353
|
| 930 |
+
vals = Series([str(i) for i in range(5)])
|
| 931 |
+
result = "foo_" + vals
|
| 932 |
+
expected = vals.map(lambda x: "foo_" + x)
|
| 933 |
+
tm.assert_series_equal(result, expected)
|
| 934 |
+
|
| 935 |
+
frame = pd.DataFrame({"vals": vals})
|
| 936 |
+
result = "foo_" + frame
|
| 937 |
+
expected = pd.DataFrame({"vals": vals.map(lambda x: "foo_" + x)})
|
| 938 |
+
tm.assert_frame_equal(result, expected)
|
| 939 |
+
|
| 940 |
+
ts = Series(
|
| 941 |
+
np.arange(10, dtype=np.float64),
|
| 942 |
+
index=date_range("2020-01-01", periods=10),
|
| 943 |
+
name="ts",
|
| 944 |
+
)
|
| 945 |
+
|
| 946 |
+
# really raise this time
|
| 947 |
+
fix_now = fixed_now_ts.to_pydatetime()
|
| 948 |
+
msg = "|".join(
|
| 949 |
+
[
|
| 950 |
+
"unsupported operand type",
|
| 951 |
+
# wrong error message, see https://github.com/numpy/numpy/issues/18832
|
| 952 |
+
"Concatenation operation",
|
| 953 |
+
]
|
| 954 |
+
)
|
| 955 |
+
with pytest.raises(TypeError, match=msg):
|
| 956 |
+
fix_now + ts
|
| 957 |
+
|
| 958 |
+
with pytest.raises(TypeError, match=msg):
|
| 959 |
+
ts + fix_now
|
| 960 |
+
|
| 961 |
+
# TODO: This came from series.test.test_operators, needs cleanup
|
| 962 |
+
def test_datetime64_with_index(self):
|
| 963 |
+
# arithmetic integer ops with an index
|
| 964 |
+
ser = Series(np.random.default_rng(2).standard_normal(5))
|
| 965 |
+
expected = ser - ser.index.to_series()
|
| 966 |
+
result = ser - ser.index
|
| 967 |
+
tm.assert_series_equal(result, expected)
|
| 968 |
+
|
| 969 |
+
# GH#4629
|
| 970 |
+
# arithmetic datetime64 ops with an index
|
| 971 |
+
ser = Series(
|
| 972 |
+
date_range("20130101", periods=5),
|
| 973 |
+
index=date_range("20130101", periods=5),
|
| 974 |
+
)
|
| 975 |
+
expected = ser - ser.index.to_series()
|
| 976 |
+
result = ser - ser.index
|
| 977 |
+
tm.assert_series_equal(result, expected)
|
| 978 |
+
|
| 979 |
+
msg = "cannot subtract PeriodArray from DatetimeArray"
|
| 980 |
+
with pytest.raises(TypeError, match=msg):
|
| 981 |
+
# GH#18850
|
| 982 |
+
result = ser - ser.index.to_period()
|
| 983 |
+
|
| 984 |
+
df = pd.DataFrame(
|
| 985 |
+
np.random.default_rng(2).standard_normal((5, 2)),
|
| 986 |
+
index=date_range("20130101", periods=5),
|
| 987 |
+
)
|
| 988 |
+
df["date"] = pd.Timestamp("20130102")
|
| 989 |
+
df["expected"] = df["date"] - df.index.to_series()
|
| 990 |
+
df["result"] = df["date"] - df.index
|
| 991 |
+
tm.assert_series_equal(df["result"], df["expected"], check_names=False)
|
| 992 |
+
|
| 993 |
+
# TODO: taken from tests.frame.test_operators, needs cleanup
|
| 994 |
+
def test_frame_operators(self, float_frame):
|
| 995 |
+
frame = float_frame
|
| 996 |
+
|
| 997 |
+
garbage = np.random.default_rng(2).random(4)
|
| 998 |
+
colSeries = Series(garbage, index=np.array(frame.columns))
|
| 999 |
+
|
| 1000 |
+
idSum = frame + frame
|
| 1001 |
+
seriesSum = frame + colSeries
|
| 1002 |
+
|
| 1003 |
+
for col, series in idSum.items():
|
| 1004 |
+
for idx, val in series.items():
|
| 1005 |
+
origVal = frame[col][idx] * 2
|
| 1006 |
+
if not np.isnan(val):
|
| 1007 |
+
assert val == origVal
|
| 1008 |
+
else:
|
| 1009 |
+
assert np.isnan(origVal)
|
| 1010 |
+
|
| 1011 |
+
for col, series in seriesSum.items():
|
| 1012 |
+
for idx, val in series.items():
|
| 1013 |
+
origVal = frame[col][idx] + colSeries[col]
|
| 1014 |
+
if not np.isnan(val):
|
| 1015 |
+
assert val == origVal
|
| 1016 |
+
else:
|
| 1017 |
+
assert np.isnan(origVal)
|
| 1018 |
+
|
| 1019 |
+
def test_frame_operators_col_align(self, float_frame):
|
| 1020 |
+
frame2 = pd.DataFrame(float_frame, columns=["D", "C", "B", "A"])
|
| 1021 |
+
added = frame2 + frame2
|
| 1022 |
+
expected = frame2 * 2
|
| 1023 |
+
tm.assert_frame_equal(added, expected)
|
| 1024 |
+
|
| 1025 |
+
def test_frame_operators_none_to_nan(self):
|
| 1026 |
+
df = pd.DataFrame({"a": ["a", None, "b"]})
|
| 1027 |
+
tm.assert_frame_equal(df + df, pd.DataFrame({"a": ["aa", np.nan, "bb"]}))
|
| 1028 |
+
|
| 1029 |
+
@pytest.mark.parametrize("dtype", ("float", "int64"))
|
| 1030 |
+
def test_frame_operators_empty_like(self, dtype):
|
| 1031 |
+
# Test for issue #10181
|
| 1032 |
+
frames = [
|
| 1033 |
+
pd.DataFrame(dtype=dtype),
|
| 1034 |
+
pd.DataFrame(columns=["A"], dtype=dtype),
|
| 1035 |
+
pd.DataFrame(index=[0], dtype=dtype),
|
| 1036 |
+
]
|
| 1037 |
+
for df in frames:
|
| 1038 |
+
assert (df + df).equals(df)
|
| 1039 |
+
tm.assert_frame_equal(df + df, df)
|
| 1040 |
+
|
| 1041 |
+
@pytest.mark.parametrize(
|
| 1042 |
+
"func",
|
| 1043 |
+
[lambda x: x * 2, lambda x: x[::2], lambda x: 5],
|
| 1044 |
+
ids=["multiply", "slice", "constant"],
|
| 1045 |
+
)
|
| 1046 |
+
def test_series_operators_arithmetic(self, all_arithmetic_functions, func):
|
| 1047 |
+
op = all_arithmetic_functions
|
| 1048 |
+
series = Series(
|
| 1049 |
+
np.arange(10, dtype=np.float64),
|
| 1050 |
+
index=date_range("2020-01-01", periods=10),
|
| 1051 |
+
name="ts",
|
| 1052 |
+
)
|
| 1053 |
+
other = func(series)
|
| 1054 |
+
compare_op(series, other, op)
|
| 1055 |
+
|
| 1056 |
+
@pytest.mark.parametrize(
|
| 1057 |
+
"func", [lambda x: x + 1, lambda x: 5], ids=["add", "constant"]
|
| 1058 |
+
)
|
| 1059 |
+
def test_series_operators_compare(self, comparison_op, func):
|
| 1060 |
+
op = comparison_op
|
| 1061 |
+
series = Series(
|
| 1062 |
+
np.arange(10, dtype=np.float64),
|
| 1063 |
+
index=date_range("2020-01-01", periods=10),
|
| 1064 |
+
name="ts",
|
| 1065 |
+
)
|
| 1066 |
+
other = func(series)
|
| 1067 |
+
compare_op(series, other, op)
|
| 1068 |
+
|
| 1069 |
+
@pytest.mark.parametrize(
|
| 1070 |
+
"func",
|
| 1071 |
+
[lambda x: x * 2, lambda x: x[::2], lambda x: 5],
|
| 1072 |
+
ids=["multiply", "slice", "constant"],
|
| 1073 |
+
)
|
| 1074 |
+
def test_divmod(self, func):
|
| 1075 |
+
series = Series(
|
| 1076 |
+
np.arange(10, dtype=np.float64),
|
| 1077 |
+
index=date_range("2020-01-01", periods=10),
|
| 1078 |
+
name="ts",
|
| 1079 |
+
)
|
| 1080 |
+
other = func(series)
|
| 1081 |
+
results = divmod(series, other)
|
| 1082 |
+
if isinstance(other, abc.Iterable) and len(series) != len(other):
|
| 1083 |
+
# if the lengths don't match, this is the test where we use
|
| 1084 |
+
# `tser[::2]`. Pad every other value in `other_np` with nan.
|
| 1085 |
+
other_np = []
|
| 1086 |
+
for n in other:
|
| 1087 |
+
other_np.append(n)
|
| 1088 |
+
other_np.append(np.nan)
|
| 1089 |
+
else:
|
| 1090 |
+
other_np = other
|
| 1091 |
+
other_np = np.asarray(other_np)
|
| 1092 |
+
with np.errstate(all="ignore"):
|
| 1093 |
+
expecteds = divmod(series.values, np.asarray(other_np))
|
| 1094 |
+
|
| 1095 |
+
for result, expected in zip(results, expecteds):
|
| 1096 |
+
# check the values, name, and index separately
|
| 1097 |
+
tm.assert_almost_equal(np.asarray(result), expected)
|
| 1098 |
+
|
| 1099 |
+
assert result.name == series.name
|
| 1100 |
+
tm.assert_index_equal(result.index, series.index._with_freq(None))
|
| 1101 |
+
|
| 1102 |
+
def test_series_divmod_zero(self):
|
| 1103 |
+
# Check that divmod uses pandas convention for division by zero,
|
| 1104 |
+
# which does not match numpy.
|
| 1105 |
+
# pandas convention has
|
| 1106 |
+
# 1/0 == np.inf
|
| 1107 |
+
# -1/0 == -np.inf
|
| 1108 |
+
# 1/-0.0 == -np.inf
|
| 1109 |
+
# -1/-0.0 == np.inf
|
| 1110 |
+
tser = Series(
|
| 1111 |
+
np.arange(1, 11, dtype=np.float64),
|
| 1112 |
+
index=date_range("2020-01-01", periods=10),
|
| 1113 |
+
name="ts",
|
| 1114 |
+
)
|
| 1115 |
+
other = tser * 0
|
| 1116 |
+
|
| 1117 |
+
result = divmod(tser, other)
|
| 1118 |
+
exp1 = Series([np.inf] * len(tser), index=tser.index, name="ts")
|
| 1119 |
+
exp2 = Series([np.nan] * len(tser), index=tser.index, name="ts")
|
| 1120 |
+
tm.assert_series_equal(result[0], exp1)
|
| 1121 |
+
tm.assert_series_equal(result[1], exp2)
|
| 1122 |
+
|
| 1123 |
+
|
| 1124 |
+
class TestUFuncCompat:
|
| 1125 |
+
# TODO: add more dtypes
|
| 1126 |
+
@pytest.mark.parametrize("holder", [Index, RangeIndex, Series])
|
| 1127 |
+
@pytest.mark.parametrize("dtype", [np.int64, np.uint64, np.float64])
|
| 1128 |
+
def test_ufunc_compat(self, holder, dtype):
|
| 1129 |
+
box = Series if holder is Series else Index
|
| 1130 |
+
|
| 1131 |
+
if holder is RangeIndex:
|
| 1132 |
+
if dtype != np.int64:
|
| 1133 |
+
pytest.skip(f"dtype {dtype} not relevant for RangeIndex")
|
| 1134 |
+
idx = RangeIndex(0, 5, name="foo")
|
| 1135 |
+
else:
|
| 1136 |
+
idx = holder(np.arange(5, dtype=dtype), name="foo")
|
| 1137 |
+
result = np.sin(idx)
|
| 1138 |
+
expected = box(np.sin(np.arange(5, dtype=dtype)), name="foo")
|
| 1139 |
+
tm.assert_equal(result, expected)
|
| 1140 |
+
|
| 1141 |
+
# TODO: add more dtypes
|
| 1142 |
+
@pytest.mark.parametrize("holder", [Index, Series])
|
| 1143 |
+
@pytest.mark.parametrize("dtype", [np.int64, np.uint64, np.float64])
|
| 1144 |
+
def test_ufunc_coercions(self, holder, dtype):
|
| 1145 |
+
idx = holder([1, 2, 3, 4, 5], dtype=dtype, name="x")
|
| 1146 |
+
box = Series if holder is Series else Index
|
| 1147 |
+
|
| 1148 |
+
result = np.sqrt(idx)
|
| 1149 |
+
assert result.dtype == "f8" and isinstance(result, box)
|
| 1150 |
+
exp = Index(np.sqrt(np.array([1, 2, 3, 4, 5], dtype=np.float64)), name="x")
|
| 1151 |
+
exp = tm.box_expected(exp, box)
|
| 1152 |
+
tm.assert_equal(result, exp)
|
| 1153 |
+
|
| 1154 |
+
result = np.divide(idx, 2.0)
|
| 1155 |
+
assert result.dtype == "f8" and isinstance(result, box)
|
| 1156 |
+
exp = Index([0.5, 1.0, 1.5, 2.0, 2.5], dtype=np.float64, name="x")
|
| 1157 |
+
exp = tm.box_expected(exp, box)
|
| 1158 |
+
tm.assert_equal(result, exp)
|
| 1159 |
+
|
| 1160 |
+
# _evaluate_numeric_binop
|
| 1161 |
+
result = idx + 2.0
|
| 1162 |
+
assert result.dtype == "f8" and isinstance(result, box)
|
| 1163 |
+
exp = Index([3.0, 4.0, 5.0, 6.0, 7.0], dtype=np.float64, name="x")
|
| 1164 |
+
exp = tm.box_expected(exp, box)
|
| 1165 |
+
tm.assert_equal(result, exp)
|
| 1166 |
+
|
| 1167 |
+
result = idx - 2.0
|
| 1168 |
+
assert result.dtype == "f8" and isinstance(result, box)
|
| 1169 |
+
exp = Index([-1.0, 0.0, 1.0, 2.0, 3.0], dtype=np.float64, name="x")
|
| 1170 |
+
exp = tm.box_expected(exp, box)
|
| 1171 |
+
tm.assert_equal(result, exp)
|
| 1172 |
+
|
| 1173 |
+
result = idx * 1.0
|
| 1174 |
+
assert result.dtype == "f8" and isinstance(result, box)
|
| 1175 |
+
exp = Index([1.0, 2.0, 3.0, 4.0, 5.0], dtype=np.float64, name="x")
|
| 1176 |
+
exp = tm.box_expected(exp, box)
|
| 1177 |
+
tm.assert_equal(result, exp)
|
| 1178 |
+
|
| 1179 |
+
result = idx / 2.0
|
| 1180 |
+
assert result.dtype == "f8" and isinstance(result, box)
|
| 1181 |
+
exp = Index([0.5, 1.0, 1.5, 2.0, 2.5], dtype=np.float64, name="x")
|
| 1182 |
+
exp = tm.box_expected(exp, box)
|
| 1183 |
+
tm.assert_equal(result, exp)
|
| 1184 |
+
|
| 1185 |
+
# TODO: add more dtypes
|
| 1186 |
+
@pytest.mark.parametrize("holder", [Index, Series])
|
| 1187 |
+
@pytest.mark.parametrize("dtype", [np.int64, np.uint64, np.float64])
|
| 1188 |
+
def test_ufunc_multiple_return_values(self, holder, dtype):
|
| 1189 |
+
obj = holder([1, 2, 3], dtype=dtype, name="x")
|
| 1190 |
+
box = Series if holder is Series else Index
|
| 1191 |
+
|
| 1192 |
+
result = np.modf(obj)
|
| 1193 |
+
assert isinstance(result, tuple)
|
| 1194 |
+
exp1 = Index([0.0, 0.0, 0.0], dtype=np.float64, name="x")
|
| 1195 |
+
exp2 = Index([1.0, 2.0, 3.0], dtype=np.float64, name="x")
|
| 1196 |
+
tm.assert_equal(result[0], tm.box_expected(exp1, box))
|
| 1197 |
+
tm.assert_equal(result[1], tm.box_expected(exp2, box))
|
| 1198 |
+
|
| 1199 |
+
def test_ufunc_at(self):
|
| 1200 |
+
s = Series([0, 1, 2], index=[1, 2, 3], name="x")
|
| 1201 |
+
np.add.at(s, [0, 2], 10)
|
| 1202 |
+
expected = Series([10, 1, 12], index=[1, 2, 3], name="x")
|
| 1203 |
+
tm.assert_series_equal(s, expected)
|
| 1204 |
+
|
| 1205 |
+
|
| 1206 |
+
class TestObjectDtypeEquivalence:
|
| 1207 |
+
# Tests that arithmetic operations match operations executed elementwise
|
| 1208 |
+
|
| 1209 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 1210 |
+
def test_numarr_with_dtype_add_nan(self, dtype, box_with_array):
|
| 1211 |
+
box = box_with_array
|
| 1212 |
+
ser = Series([1, 2, 3], dtype=dtype)
|
| 1213 |
+
expected = Series([np.nan, np.nan, np.nan], dtype=dtype)
|
| 1214 |
+
|
| 1215 |
+
ser = tm.box_expected(ser, box)
|
| 1216 |
+
expected = tm.box_expected(expected, box)
|
| 1217 |
+
|
| 1218 |
+
result = np.nan + ser
|
| 1219 |
+
tm.assert_equal(result, expected)
|
| 1220 |
+
|
| 1221 |
+
result = ser + np.nan
|
| 1222 |
+
tm.assert_equal(result, expected)
|
| 1223 |
+
|
| 1224 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 1225 |
+
def test_numarr_with_dtype_add_int(self, dtype, box_with_array):
|
| 1226 |
+
box = box_with_array
|
| 1227 |
+
ser = Series([1, 2, 3], dtype=dtype)
|
| 1228 |
+
expected = Series([2, 3, 4], dtype=dtype)
|
| 1229 |
+
|
| 1230 |
+
ser = tm.box_expected(ser, box)
|
| 1231 |
+
expected = tm.box_expected(expected, box)
|
| 1232 |
+
|
| 1233 |
+
result = 1 + ser
|
| 1234 |
+
tm.assert_equal(result, expected)
|
| 1235 |
+
|
| 1236 |
+
result = ser + 1
|
| 1237 |
+
tm.assert_equal(result, expected)
|
| 1238 |
+
|
| 1239 |
+
# TODO: moved from tests.series.test_operators; needs cleanup
|
| 1240 |
+
@pytest.mark.parametrize(
|
| 1241 |
+
"op",
|
| 1242 |
+
[operator.add, operator.sub, operator.mul, operator.truediv, operator.floordiv],
|
| 1243 |
+
)
|
| 1244 |
+
def test_operators_reverse_object(self, op):
|
| 1245 |
+
# GH#56
|
| 1246 |
+
arr = Series(
|
| 1247 |
+
np.random.default_rng(2).standard_normal(10),
|
| 1248 |
+
index=np.arange(10),
|
| 1249 |
+
dtype=object,
|
| 1250 |
+
)
|
| 1251 |
+
|
| 1252 |
+
result = op(1.0, arr)
|
| 1253 |
+
expected = op(1.0, arr.astype(float))
|
| 1254 |
+
tm.assert_series_equal(result.astype(float), expected)
|
| 1255 |
+
|
| 1256 |
+
|
| 1257 |
+
class TestNumericArithmeticUnsorted:
|
| 1258 |
+
# Tests in this class have been moved from type-specific test modules
|
| 1259 |
+
# but not yet sorted, parametrized, and de-duplicated
|
| 1260 |
+
@pytest.mark.parametrize(
|
| 1261 |
+
"op",
|
| 1262 |
+
[
|
| 1263 |
+
operator.add,
|
| 1264 |
+
operator.sub,
|
| 1265 |
+
operator.mul,
|
| 1266 |
+
operator.floordiv,
|
| 1267 |
+
operator.truediv,
|
| 1268 |
+
],
|
| 1269 |
+
)
|
| 1270 |
+
@pytest.mark.parametrize(
|
| 1271 |
+
"idx1",
|
| 1272 |
+
[
|
| 1273 |
+
RangeIndex(0, 10, 1),
|
| 1274 |
+
RangeIndex(0, 20, 2),
|
| 1275 |
+
RangeIndex(-10, 10, 2),
|
| 1276 |
+
RangeIndex(5, -5, -1),
|
| 1277 |
+
],
|
| 1278 |
+
)
|
| 1279 |
+
@pytest.mark.parametrize(
|
| 1280 |
+
"idx2",
|
| 1281 |
+
[
|
| 1282 |
+
RangeIndex(0, 10, 1),
|
| 1283 |
+
RangeIndex(0, 20, 2),
|
| 1284 |
+
RangeIndex(-10, 10, 2),
|
| 1285 |
+
RangeIndex(5, -5, -1),
|
| 1286 |
+
],
|
| 1287 |
+
)
|
| 1288 |
+
def test_binops_index(self, op, idx1, idx2):
|
| 1289 |
+
idx1 = idx1._rename("foo")
|
| 1290 |
+
idx2 = idx2._rename("bar")
|
| 1291 |
+
result = op(idx1, idx2)
|
| 1292 |
+
expected = op(Index(idx1.to_numpy()), Index(idx2.to_numpy()))
|
| 1293 |
+
tm.assert_index_equal(result, expected, exact="equiv")
|
| 1294 |
+
|
| 1295 |
+
@pytest.mark.parametrize(
|
| 1296 |
+
"op",
|
| 1297 |
+
[
|
| 1298 |
+
operator.add,
|
| 1299 |
+
operator.sub,
|
| 1300 |
+
operator.mul,
|
| 1301 |
+
operator.floordiv,
|
| 1302 |
+
operator.truediv,
|
| 1303 |
+
],
|
| 1304 |
+
)
|
| 1305 |
+
@pytest.mark.parametrize(
|
| 1306 |
+
"idx",
|
| 1307 |
+
[
|
| 1308 |
+
RangeIndex(0, 10, 1),
|
| 1309 |
+
RangeIndex(0, 20, 2),
|
| 1310 |
+
RangeIndex(-10, 10, 2),
|
| 1311 |
+
RangeIndex(5, -5, -1),
|
| 1312 |
+
],
|
| 1313 |
+
)
|
| 1314 |
+
@pytest.mark.parametrize("scalar", [-1, 1, 2])
|
| 1315 |
+
def test_binops_index_scalar(self, op, idx, scalar):
|
| 1316 |
+
result = op(idx, scalar)
|
| 1317 |
+
expected = op(Index(idx.to_numpy()), scalar)
|
| 1318 |
+
tm.assert_index_equal(result, expected, exact="equiv")
|
| 1319 |
+
|
| 1320 |
+
@pytest.mark.parametrize("idx1", [RangeIndex(0, 10, 1), RangeIndex(0, 20, 2)])
|
| 1321 |
+
@pytest.mark.parametrize("idx2", [RangeIndex(0, 10, 1), RangeIndex(0, 20, 2)])
|
| 1322 |
+
def test_binops_index_pow(self, idx1, idx2):
|
| 1323 |
+
# numpy does not allow powers of negative integers so test separately
|
| 1324 |
+
# https://github.com/numpy/numpy/pull/8127
|
| 1325 |
+
idx1 = idx1._rename("foo")
|
| 1326 |
+
idx2 = idx2._rename("bar")
|
| 1327 |
+
result = pow(idx1, idx2)
|
| 1328 |
+
expected = pow(Index(idx1.to_numpy()), Index(idx2.to_numpy()))
|
| 1329 |
+
tm.assert_index_equal(result, expected, exact="equiv")
|
| 1330 |
+
|
| 1331 |
+
@pytest.mark.parametrize("idx", [RangeIndex(0, 10, 1), RangeIndex(0, 20, 2)])
|
| 1332 |
+
@pytest.mark.parametrize("scalar", [1, 2])
|
| 1333 |
+
def test_binops_index_scalar_pow(self, idx, scalar):
|
| 1334 |
+
# numpy does not allow powers of negative integers so test separately
|
| 1335 |
+
# https://github.com/numpy/numpy/pull/8127
|
| 1336 |
+
result = pow(idx, scalar)
|
| 1337 |
+
expected = pow(Index(idx.to_numpy()), scalar)
|
| 1338 |
+
tm.assert_index_equal(result, expected, exact="equiv")
|
| 1339 |
+
|
| 1340 |
+
# TODO: divmod?
|
| 1341 |
+
@pytest.mark.parametrize(
|
| 1342 |
+
"op",
|
| 1343 |
+
[
|
| 1344 |
+
operator.add,
|
| 1345 |
+
operator.sub,
|
| 1346 |
+
operator.mul,
|
| 1347 |
+
operator.floordiv,
|
| 1348 |
+
operator.truediv,
|
| 1349 |
+
operator.pow,
|
| 1350 |
+
operator.mod,
|
| 1351 |
+
],
|
| 1352 |
+
)
|
| 1353 |
+
def test_arithmetic_with_frame_or_series(self, op):
|
| 1354 |
+
# check that we return NotImplemented when operating with Series
|
| 1355 |
+
# or DataFrame
|
| 1356 |
+
index = RangeIndex(5)
|
| 1357 |
+
other = Series(np.random.default_rng(2).standard_normal(5))
|
| 1358 |
+
|
| 1359 |
+
expected = op(Series(index), other)
|
| 1360 |
+
result = op(index, other)
|
| 1361 |
+
tm.assert_series_equal(result, expected)
|
| 1362 |
+
|
| 1363 |
+
other = pd.DataFrame(np.random.default_rng(2).standard_normal((2, 5)))
|
| 1364 |
+
expected = op(pd.DataFrame([index, index]), other)
|
| 1365 |
+
result = op(index, other)
|
| 1366 |
+
tm.assert_frame_equal(result, expected)
|
| 1367 |
+
|
| 1368 |
+
def test_numeric_compat2(self):
|
| 1369 |
+
# validate that we are handling the RangeIndex overrides to numeric ops
|
| 1370 |
+
# and returning RangeIndex where possible
|
| 1371 |
+
|
| 1372 |
+
idx = RangeIndex(0, 10, 2)
|
| 1373 |
+
|
| 1374 |
+
result = idx * 2
|
| 1375 |
+
expected = RangeIndex(0, 20, 4)
|
| 1376 |
+
tm.assert_index_equal(result, expected, exact=True)
|
| 1377 |
+
|
| 1378 |
+
result = idx + 2
|
| 1379 |
+
expected = RangeIndex(2, 12, 2)
|
| 1380 |
+
tm.assert_index_equal(result, expected, exact=True)
|
| 1381 |
+
|
| 1382 |
+
result = idx - 2
|
| 1383 |
+
expected = RangeIndex(-2, 8, 2)
|
| 1384 |
+
tm.assert_index_equal(result, expected, exact=True)
|
| 1385 |
+
|
| 1386 |
+
result = idx / 2
|
| 1387 |
+
expected = RangeIndex(0, 5, 1).astype("float64")
|
| 1388 |
+
tm.assert_index_equal(result, expected, exact=True)
|
| 1389 |
+
|
| 1390 |
+
result = idx / 4
|
| 1391 |
+
expected = RangeIndex(0, 10, 2) / 4
|
| 1392 |
+
tm.assert_index_equal(result, expected, exact=True)
|
| 1393 |
+
|
| 1394 |
+
result = idx // 1
|
| 1395 |
+
expected = idx
|
| 1396 |
+
tm.assert_index_equal(result, expected, exact=True)
|
| 1397 |
+
|
| 1398 |
+
# __mul__
|
| 1399 |
+
result = idx * idx
|
| 1400 |
+
expected = Index(idx.values * idx.values)
|
| 1401 |
+
tm.assert_index_equal(result, expected, exact=True)
|
| 1402 |
+
|
| 1403 |
+
# __pow__
|
| 1404 |
+
idx = RangeIndex(0, 1000, 2)
|
| 1405 |
+
result = idx**2
|
| 1406 |
+
expected = Index(idx._values) ** 2
|
| 1407 |
+
tm.assert_index_equal(Index(result.values), expected, exact=True)
|
| 1408 |
+
|
| 1409 |
+
@pytest.mark.parametrize(
|
| 1410 |
+
"idx, div, expected",
|
| 1411 |
+
[
|
| 1412 |
+
# TODO: add more dtypes
|
| 1413 |
+
(RangeIndex(0, 1000, 2), 2, RangeIndex(0, 500, 1)),
|
| 1414 |
+
(RangeIndex(-99, -201, -3), -3, RangeIndex(33, 67, 1)),
|
| 1415 |
+
(
|
| 1416 |
+
RangeIndex(0, 1000, 1),
|
| 1417 |
+
2,
|
| 1418 |
+
Index(RangeIndex(0, 1000, 1)._values) // 2,
|
| 1419 |
+
),
|
| 1420 |
+
(
|
| 1421 |
+
RangeIndex(0, 100, 1),
|
| 1422 |
+
2.0,
|
| 1423 |
+
Index(RangeIndex(0, 100, 1)._values) // 2.0,
|
| 1424 |
+
),
|
| 1425 |
+
(RangeIndex(0), 50, RangeIndex(0)),
|
| 1426 |
+
(RangeIndex(2, 4, 2), 3, RangeIndex(0, 1, 1)),
|
| 1427 |
+
(RangeIndex(-5, -10, -6), 4, RangeIndex(-2, -1, 1)),
|
| 1428 |
+
(RangeIndex(-100, -200, 3), 2, RangeIndex(0)),
|
| 1429 |
+
],
|
| 1430 |
+
)
|
| 1431 |
+
def test_numeric_compat2_floordiv(self, idx, div, expected):
|
| 1432 |
+
# __floordiv__
|
| 1433 |
+
tm.assert_index_equal(idx // div, expected, exact=True)
|
| 1434 |
+
|
| 1435 |
+
@pytest.mark.parametrize("dtype", [np.int64, np.float64])
|
| 1436 |
+
@pytest.mark.parametrize("delta", [1, 0, -1])
|
| 1437 |
+
def test_addsub_arithmetic(self, dtype, delta):
|
| 1438 |
+
# GH#8142
|
| 1439 |
+
delta = dtype(delta)
|
| 1440 |
+
index = Index([10, 11, 12], dtype=dtype)
|
| 1441 |
+
result = index + delta
|
| 1442 |
+
expected = Index(index.values + delta, dtype=dtype)
|
| 1443 |
+
tm.assert_index_equal(result, expected)
|
| 1444 |
+
|
| 1445 |
+
# this subtraction used to fail
|
| 1446 |
+
result = index - delta
|
| 1447 |
+
expected = Index(index.values - delta, dtype=dtype)
|
| 1448 |
+
tm.assert_index_equal(result, expected)
|
| 1449 |
+
|
| 1450 |
+
tm.assert_index_equal(index + index, 2 * index)
|
| 1451 |
+
tm.assert_index_equal(index - index, 0 * index)
|
| 1452 |
+
assert not (index - index).empty
|
| 1453 |
+
|
| 1454 |
+
def test_pow_nan_with_zero(self, box_with_array):
|
| 1455 |
+
left = Index([np.nan, np.nan, np.nan])
|
| 1456 |
+
right = Index([0, 0, 0])
|
| 1457 |
+
expected = Index([1.0, 1.0, 1.0])
|
| 1458 |
+
|
| 1459 |
+
left = tm.box_expected(left, box_with_array)
|
| 1460 |
+
right = tm.box_expected(right, box_with_array)
|
| 1461 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1462 |
+
|
| 1463 |
+
result = left**right
|
| 1464 |
+
tm.assert_equal(result, expected)
|
| 1465 |
+
|
| 1466 |
+
|
| 1467 |
+
def test_fill_value_inf_masking():
|
| 1468 |
+
# GH #27464 make sure we mask 0/1 with Inf and not NaN
|
| 1469 |
+
df = pd.DataFrame({"A": [0, 1, 2], "B": [1.1, None, 1.1]})
|
| 1470 |
+
|
| 1471 |
+
other = pd.DataFrame({"A": [1.1, 1.2, 1.3]}, index=[0, 2, 3])
|
| 1472 |
+
|
| 1473 |
+
result = df.rfloordiv(other, fill_value=1)
|
| 1474 |
+
|
| 1475 |
+
expected = pd.DataFrame(
|
| 1476 |
+
{"A": [np.inf, 1.0, 0.0, 1.0], "B": [0.0, np.nan, 0.0, np.nan]}
|
| 1477 |
+
)
|
| 1478 |
+
tm.assert_frame_equal(result, expected)
|
| 1479 |
+
|
| 1480 |
+
|
| 1481 |
+
def test_dataframe_div_silenced():
|
| 1482 |
+
# GH#26793
|
| 1483 |
+
pdf1 = pd.DataFrame(
|
| 1484 |
+
{
|
| 1485 |
+
"A": np.arange(10),
|
| 1486 |
+
"B": [np.nan, 1, 2, 3, 4] * 2,
|
| 1487 |
+
"C": [np.nan] * 10,
|
| 1488 |
+
"D": np.arange(10),
|
| 1489 |
+
},
|
| 1490 |
+
index=list("abcdefghij"),
|
| 1491 |
+
columns=list("ABCD"),
|
| 1492 |
+
)
|
| 1493 |
+
pdf2 = pd.DataFrame(
|
| 1494 |
+
np.random.default_rng(2).standard_normal((10, 4)),
|
| 1495 |
+
index=list("abcdefghjk"),
|
| 1496 |
+
columns=list("ABCX"),
|
| 1497 |
+
)
|
| 1498 |
+
with tm.assert_produces_warning(None):
|
| 1499 |
+
pdf1.div(pdf2, fill_value=0)
|
| 1500 |
+
|
| 1501 |
+
|
| 1502 |
+
@pytest.mark.parametrize(
|
| 1503 |
+
"data, expected_data",
|
| 1504 |
+
[([0, 1, 2], [0, 2, 4])],
|
| 1505 |
+
)
|
| 1506 |
+
def test_integer_array_add_list_like(
|
| 1507 |
+
box_pandas_1d_array, box_1d_array, data, expected_data
|
| 1508 |
+
):
|
| 1509 |
+
# GH22606 Verify operators with IntegerArray and list-likes
|
| 1510 |
+
arr = array(data, dtype="Int64")
|
| 1511 |
+
container = box_pandas_1d_array(arr)
|
| 1512 |
+
left = container + box_1d_array(data)
|
| 1513 |
+
right = box_1d_array(data) + container
|
| 1514 |
+
|
| 1515 |
+
if Series in [box_1d_array, box_pandas_1d_array]:
|
| 1516 |
+
cls = Series
|
| 1517 |
+
elif Index in [box_1d_array, box_pandas_1d_array]:
|
| 1518 |
+
cls = Index
|
| 1519 |
+
else:
|
| 1520 |
+
cls = array
|
| 1521 |
+
|
| 1522 |
+
expected = cls(expected_data, dtype="Int64")
|
| 1523 |
+
|
| 1524 |
+
tm.assert_equal(left, expected)
|
| 1525 |
+
tm.assert_equal(right, expected)
|
| 1526 |
+
|
| 1527 |
+
|
| 1528 |
+
def test_sub_multiindex_swapped_levels():
|
| 1529 |
+
# GH 9952
|
| 1530 |
+
df = pd.DataFrame(
|
| 1531 |
+
{"a": np.random.default_rng(2).standard_normal(6)},
|
| 1532 |
+
index=pd.MultiIndex.from_product(
|
| 1533 |
+
[["a", "b"], [0, 1, 2]], names=["levA", "levB"]
|
| 1534 |
+
),
|
| 1535 |
+
)
|
| 1536 |
+
df2 = df.copy()
|
| 1537 |
+
df2.index = df2.index.swaplevel(0, 1)
|
| 1538 |
+
result = df - df2
|
| 1539 |
+
expected = pd.DataFrame([0.0] * 6, columns=["a"], index=df.index)
|
| 1540 |
+
tm.assert_frame_equal(result, expected)
|
| 1541 |
+
|
| 1542 |
+
|
| 1543 |
+
@pytest.mark.parametrize("power", [1, 2, 5])
|
| 1544 |
+
@pytest.mark.parametrize("string_size", [0, 1, 2, 5])
|
| 1545 |
+
def test_empty_str_comparison(power, string_size):
|
| 1546 |
+
# GH 37348
|
| 1547 |
+
a = np.array(range(10**power))
|
| 1548 |
+
right = pd.DataFrame(a, dtype=np.int64)
|
| 1549 |
+
left = " " * string_size
|
| 1550 |
+
|
| 1551 |
+
result = right == left
|
| 1552 |
+
expected = pd.DataFrame(np.zeros(right.shape, dtype=bool))
|
| 1553 |
+
tm.assert_frame_equal(result, expected)
|
| 1554 |
+
|
| 1555 |
+
|
| 1556 |
+
def test_series_add_sub_with_UInt64():
|
| 1557 |
+
# GH 22023
|
| 1558 |
+
series1 = Series([1, 2, 3])
|
| 1559 |
+
series2 = Series([2, 1, 3], dtype="UInt64")
|
| 1560 |
+
|
| 1561 |
+
result = series1 + series2
|
| 1562 |
+
expected = Series([3, 3, 6], dtype="Float64")
|
| 1563 |
+
tm.assert_series_equal(result, expected)
|
| 1564 |
+
|
| 1565 |
+
result = series1 - series2
|
| 1566 |
+
expected = Series([-1, 1, 0], dtype="Float64")
|
| 1567 |
+
tm.assert_series_equal(result, expected)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_object.py
ADDED
|
@@ -0,0 +1,420 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Arithmetic tests for DataFrame/Series/Index/Array classes that should
|
| 2 |
+
# behave identically.
|
| 3 |
+
# Specifically for object dtype
|
| 4 |
+
import datetime
|
| 5 |
+
from decimal import Decimal
|
| 6 |
+
import operator
|
| 7 |
+
|
| 8 |
+
import numpy as np
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from pandas._config import using_pyarrow_string_dtype
|
| 12 |
+
|
| 13 |
+
import pandas.util._test_decorators as td
|
| 14 |
+
|
| 15 |
+
import pandas as pd
|
| 16 |
+
from pandas import (
|
| 17 |
+
Series,
|
| 18 |
+
Timestamp,
|
| 19 |
+
option_context,
|
| 20 |
+
)
|
| 21 |
+
import pandas._testing as tm
|
| 22 |
+
from pandas.core import ops
|
| 23 |
+
|
| 24 |
+
# ------------------------------------------------------------------
|
| 25 |
+
# Comparisons
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class TestObjectComparisons:
|
| 29 |
+
def test_comparison_object_numeric_nas(self, comparison_op):
|
| 30 |
+
ser = Series(np.random.default_rng(2).standard_normal(10), dtype=object)
|
| 31 |
+
shifted = ser.shift(2)
|
| 32 |
+
|
| 33 |
+
func = comparison_op
|
| 34 |
+
|
| 35 |
+
result = func(ser, shifted)
|
| 36 |
+
expected = func(ser.astype(float), shifted.astype(float))
|
| 37 |
+
tm.assert_series_equal(result, expected)
|
| 38 |
+
|
| 39 |
+
@pytest.mark.parametrize(
|
| 40 |
+
"infer_string", [False, pytest.param(True, marks=td.skip_if_no("pyarrow"))]
|
| 41 |
+
)
|
| 42 |
+
def test_object_comparisons(self, infer_string):
|
| 43 |
+
with option_context("future.infer_string", infer_string):
|
| 44 |
+
ser = Series(["a", "b", np.nan, "c", "a"])
|
| 45 |
+
|
| 46 |
+
result = ser == "a"
|
| 47 |
+
expected = Series([True, False, False, False, True])
|
| 48 |
+
tm.assert_series_equal(result, expected)
|
| 49 |
+
|
| 50 |
+
result = ser < "a"
|
| 51 |
+
expected = Series([False, False, False, False, False])
|
| 52 |
+
tm.assert_series_equal(result, expected)
|
| 53 |
+
|
| 54 |
+
result = ser != "a"
|
| 55 |
+
expected = -(ser == "a")
|
| 56 |
+
tm.assert_series_equal(result, expected)
|
| 57 |
+
|
| 58 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 59 |
+
def test_more_na_comparisons(self, dtype):
|
| 60 |
+
left = Series(["a", np.nan, "c"], dtype=dtype)
|
| 61 |
+
right = Series(["a", np.nan, "d"], dtype=dtype)
|
| 62 |
+
|
| 63 |
+
result = left == right
|
| 64 |
+
expected = Series([True, False, False])
|
| 65 |
+
tm.assert_series_equal(result, expected)
|
| 66 |
+
|
| 67 |
+
result = left != right
|
| 68 |
+
expected = Series([False, True, True])
|
| 69 |
+
tm.assert_series_equal(result, expected)
|
| 70 |
+
|
| 71 |
+
result = left == np.nan
|
| 72 |
+
expected = Series([False, False, False])
|
| 73 |
+
tm.assert_series_equal(result, expected)
|
| 74 |
+
|
| 75 |
+
result = left != np.nan
|
| 76 |
+
expected = Series([True, True, True])
|
| 77 |
+
tm.assert_series_equal(result, expected)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
# ------------------------------------------------------------------
|
| 81 |
+
# Arithmetic
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class TestArithmetic:
|
| 85 |
+
def test_add_period_to_array_of_offset(self):
|
| 86 |
+
# GH#50162
|
| 87 |
+
per = pd.Period("2012-1-1", freq="D")
|
| 88 |
+
pi = pd.period_range("2012-1-1", periods=10, freq="D")
|
| 89 |
+
idx = per - pi
|
| 90 |
+
|
| 91 |
+
expected = pd.Index([x + per for x in idx], dtype=object)
|
| 92 |
+
result = idx + per
|
| 93 |
+
tm.assert_index_equal(result, expected)
|
| 94 |
+
|
| 95 |
+
result = per + idx
|
| 96 |
+
tm.assert_index_equal(result, expected)
|
| 97 |
+
|
| 98 |
+
# TODO: parametrize
|
| 99 |
+
def test_pow_ops_object(self):
|
| 100 |
+
# GH#22922
|
| 101 |
+
# pow is weird with masking & 1, so testing here
|
| 102 |
+
a = Series([1, np.nan, 1, np.nan], dtype=object)
|
| 103 |
+
b = Series([1, np.nan, np.nan, 1], dtype=object)
|
| 104 |
+
result = a**b
|
| 105 |
+
expected = Series(a.values**b.values, dtype=object)
|
| 106 |
+
tm.assert_series_equal(result, expected)
|
| 107 |
+
|
| 108 |
+
result = b**a
|
| 109 |
+
expected = Series(b.values**a.values, dtype=object)
|
| 110 |
+
|
| 111 |
+
tm.assert_series_equal(result, expected)
|
| 112 |
+
|
| 113 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd])
|
| 114 |
+
@pytest.mark.parametrize("other", ["category", "Int64"])
|
| 115 |
+
def test_add_extension_scalar(self, other, box_with_array, op):
|
| 116 |
+
# GH#22378
|
| 117 |
+
# Check that scalars satisfying is_extension_array_dtype(obj)
|
| 118 |
+
# do not incorrectly try to dispatch to an ExtensionArray operation
|
| 119 |
+
|
| 120 |
+
arr = Series(["a", "b", "c"])
|
| 121 |
+
expected = Series([op(x, other) for x in arr])
|
| 122 |
+
|
| 123 |
+
arr = tm.box_expected(arr, box_with_array)
|
| 124 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 125 |
+
|
| 126 |
+
result = op(arr, other)
|
| 127 |
+
tm.assert_equal(result, expected)
|
| 128 |
+
|
| 129 |
+
def test_objarr_add_str(self, box_with_array):
|
| 130 |
+
ser = Series(["x", np.nan, "x"])
|
| 131 |
+
expected = Series(["xa", np.nan, "xa"])
|
| 132 |
+
|
| 133 |
+
ser = tm.box_expected(ser, box_with_array)
|
| 134 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 135 |
+
|
| 136 |
+
result = ser + "a"
|
| 137 |
+
tm.assert_equal(result, expected)
|
| 138 |
+
|
| 139 |
+
def test_objarr_radd_str(self, box_with_array):
|
| 140 |
+
ser = Series(["x", np.nan, "x"])
|
| 141 |
+
expected = Series(["ax", np.nan, "ax"])
|
| 142 |
+
|
| 143 |
+
ser = tm.box_expected(ser, box_with_array)
|
| 144 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 145 |
+
|
| 146 |
+
result = "a" + ser
|
| 147 |
+
tm.assert_equal(result, expected)
|
| 148 |
+
|
| 149 |
+
@pytest.mark.parametrize(
|
| 150 |
+
"data",
|
| 151 |
+
[
|
| 152 |
+
[1, 2, 3],
|
| 153 |
+
[1.1, 2.2, 3.3],
|
| 154 |
+
[Timestamp("2011-01-01"), Timestamp("2011-01-02"), pd.NaT],
|
| 155 |
+
["x", "y", 1],
|
| 156 |
+
],
|
| 157 |
+
)
|
| 158 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 159 |
+
def test_objarr_radd_str_invalid(self, dtype, data, box_with_array):
|
| 160 |
+
ser = Series(data, dtype=dtype)
|
| 161 |
+
|
| 162 |
+
ser = tm.box_expected(ser, box_with_array)
|
| 163 |
+
msg = "|".join(
|
| 164 |
+
[
|
| 165 |
+
"can only concatenate str",
|
| 166 |
+
"did not contain a loop with signature matching types",
|
| 167 |
+
"unsupported operand type",
|
| 168 |
+
"must be str",
|
| 169 |
+
]
|
| 170 |
+
)
|
| 171 |
+
with pytest.raises(TypeError, match=msg):
|
| 172 |
+
"foo_" + ser
|
| 173 |
+
|
| 174 |
+
@pytest.mark.parametrize("op", [operator.add, ops.radd, operator.sub, ops.rsub])
|
| 175 |
+
def test_objarr_add_invalid(self, op, box_with_array):
|
| 176 |
+
# invalid ops
|
| 177 |
+
box = box_with_array
|
| 178 |
+
|
| 179 |
+
obj_ser = Series(list("abc"), dtype=object, name="objects")
|
| 180 |
+
|
| 181 |
+
obj_ser = tm.box_expected(obj_ser, box)
|
| 182 |
+
msg = "|".join(
|
| 183 |
+
[
|
| 184 |
+
"can only concatenate str",
|
| 185 |
+
"unsupported operand type",
|
| 186 |
+
"must be str",
|
| 187 |
+
"has no kernel",
|
| 188 |
+
]
|
| 189 |
+
)
|
| 190 |
+
with pytest.raises(Exception, match=msg):
|
| 191 |
+
op(obj_ser, 1)
|
| 192 |
+
with pytest.raises(Exception, match=msg):
|
| 193 |
+
op(obj_ser, np.array(1, dtype=np.int64))
|
| 194 |
+
|
| 195 |
+
# TODO: Moved from tests.series.test_operators; needs cleanup
|
| 196 |
+
def test_operators_na_handling(self):
|
| 197 |
+
ser = Series(["foo", "bar", "baz", np.nan])
|
| 198 |
+
result = "prefix_" + ser
|
| 199 |
+
expected = Series(["prefix_foo", "prefix_bar", "prefix_baz", np.nan])
|
| 200 |
+
tm.assert_series_equal(result, expected)
|
| 201 |
+
|
| 202 |
+
result = ser + "_suffix"
|
| 203 |
+
expected = Series(["foo_suffix", "bar_suffix", "baz_suffix", np.nan])
|
| 204 |
+
tm.assert_series_equal(result, expected)
|
| 205 |
+
|
| 206 |
+
# TODO: parametrize over box
|
| 207 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 208 |
+
def test_series_with_dtype_radd_timedelta(self, dtype):
|
| 209 |
+
# note this test is _not_ aimed at timedelta64-dtyped Series
|
| 210 |
+
# as of 2.0 we retain object dtype when ser.dtype == object
|
| 211 |
+
ser = Series(
|
| 212 |
+
[pd.Timedelta("1 days"), pd.Timedelta("2 days"), pd.Timedelta("3 days")],
|
| 213 |
+
dtype=dtype,
|
| 214 |
+
)
|
| 215 |
+
expected = Series(
|
| 216 |
+
[pd.Timedelta("4 days"), pd.Timedelta("5 days"), pd.Timedelta("6 days")],
|
| 217 |
+
dtype=dtype,
|
| 218 |
+
)
|
| 219 |
+
|
| 220 |
+
result = pd.Timedelta("3 days") + ser
|
| 221 |
+
tm.assert_series_equal(result, expected)
|
| 222 |
+
|
| 223 |
+
result = ser + pd.Timedelta("3 days")
|
| 224 |
+
tm.assert_series_equal(result, expected)
|
| 225 |
+
|
| 226 |
+
# TODO: cleanup & parametrize over box
|
| 227 |
+
def test_mixed_timezone_series_ops_object(self):
|
| 228 |
+
# GH#13043
|
| 229 |
+
ser = Series(
|
| 230 |
+
[
|
| 231 |
+
Timestamp("2015-01-01", tz="US/Eastern"),
|
| 232 |
+
Timestamp("2015-01-01", tz="Asia/Tokyo"),
|
| 233 |
+
],
|
| 234 |
+
name="xxx",
|
| 235 |
+
)
|
| 236 |
+
assert ser.dtype == object
|
| 237 |
+
|
| 238 |
+
exp = Series(
|
| 239 |
+
[
|
| 240 |
+
Timestamp("2015-01-02", tz="US/Eastern"),
|
| 241 |
+
Timestamp("2015-01-02", tz="Asia/Tokyo"),
|
| 242 |
+
],
|
| 243 |
+
name="xxx",
|
| 244 |
+
)
|
| 245 |
+
tm.assert_series_equal(ser + pd.Timedelta("1 days"), exp)
|
| 246 |
+
tm.assert_series_equal(pd.Timedelta("1 days") + ser, exp)
|
| 247 |
+
|
| 248 |
+
# object series & object series
|
| 249 |
+
ser2 = Series(
|
| 250 |
+
[
|
| 251 |
+
Timestamp("2015-01-03", tz="US/Eastern"),
|
| 252 |
+
Timestamp("2015-01-05", tz="Asia/Tokyo"),
|
| 253 |
+
],
|
| 254 |
+
name="xxx",
|
| 255 |
+
)
|
| 256 |
+
assert ser2.dtype == object
|
| 257 |
+
exp = Series(
|
| 258 |
+
[pd.Timedelta("2 days"), pd.Timedelta("4 days")], name="xxx", dtype=object
|
| 259 |
+
)
|
| 260 |
+
tm.assert_series_equal(ser2 - ser, exp)
|
| 261 |
+
tm.assert_series_equal(ser - ser2, -exp)
|
| 262 |
+
|
| 263 |
+
ser = Series(
|
| 264 |
+
[pd.Timedelta("01:00:00"), pd.Timedelta("02:00:00")],
|
| 265 |
+
name="xxx",
|
| 266 |
+
dtype=object,
|
| 267 |
+
)
|
| 268 |
+
assert ser.dtype == object
|
| 269 |
+
|
| 270 |
+
exp = Series(
|
| 271 |
+
[pd.Timedelta("01:30:00"), pd.Timedelta("02:30:00")],
|
| 272 |
+
name="xxx",
|
| 273 |
+
dtype=object,
|
| 274 |
+
)
|
| 275 |
+
tm.assert_series_equal(ser + pd.Timedelta("00:30:00"), exp)
|
| 276 |
+
tm.assert_series_equal(pd.Timedelta("00:30:00") + ser, exp)
|
| 277 |
+
|
| 278 |
+
# TODO: cleanup & parametrize over box
|
| 279 |
+
def test_iadd_preserves_name(self):
|
| 280 |
+
# GH#17067, GH#19723 __iadd__ and __isub__ should preserve index name
|
| 281 |
+
ser = Series([1, 2, 3])
|
| 282 |
+
ser.index.name = "foo"
|
| 283 |
+
|
| 284 |
+
ser.index += 1
|
| 285 |
+
assert ser.index.name == "foo"
|
| 286 |
+
|
| 287 |
+
ser.index -= 1
|
| 288 |
+
assert ser.index.name == "foo"
|
| 289 |
+
|
| 290 |
+
def test_add_string(self):
|
| 291 |
+
# from bug report
|
| 292 |
+
index = pd.Index(["a", "b", "c"])
|
| 293 |
+
index2 = index + "foo"
|
| 294 |
+
|
| 295 |
+
assert "a" not in index2
|
| 296 |
+
assert "afoo" in index2
|
| 297 |
+
|
| 298 |
+
def test_iadd_string(self):
|
| 299 |
+
index = pd.Index(["a", "b", "c"])
|
| 300 |
+
# doesn't fail test unless there is a check before `+=`
|
| 301 |
+
assert "a" in index
|
| 302 |
+
|
| 303 |
+
index += "_x"
|
| 304 |
+
assert "a_x" in index
|
| 305 |
+
|
| 306 |
+
@pytest.mark.xfail(using_pyarrow_string_dtype(), reason="add doesn't work")
|
| 307 |
+
def test_add(self):
|
| 308 |
+
index = pd.Index([str(i) for i in range(10)])
|
| 309 |
+
expected = pd.Index(index.values * 2)
|
| 310 |
+
tm.assert_index_equal(index + index, expected)
|
| 311 |
+
tm.assert_index_equal(index + index.tolist(), expected)
|
| 312 |
+
tm.assert_index_equal(index.tolist() + index, expected)
|
| 313 |
+
|
| 314 |
+
# test add and radd
|
| 315 |
+
index = pd.Index(list("abc"))
|
| 316 |
+
expected = pd.Index(["a1", "b1", "c1"])
|
| 317 |
+
tm.assert_index_equal(index + "1", expected)
|
| 318 |
+
expected = pd.Index(["1a", "1b", "1c"])
|
| 319 |
+
tm.assert_index_equal("1" + index, expected)
|
| 320 |
+
|
| 321 |
+
def test_sub_fail(self, using_infer_string):
|
| 322 |
+
index = pd.Index([str(i) for i in range(10)])
|
| 323 |
+
|
| 324 |
+
if using_infer_string:
|
| 325 |
+
import pyarrow as pa
|
| 326 |
+
|
| 327 |
+
err = pa.lib.ArrowNotImplementedError
|
| 328 |
+
msg = "has no kernel"
|
| 329 |
+
else:
|
| 330 |
+
err = TypeError
|
| 331 |
+
msg = "unsupported operand type|Cannot broadcast"
|
| 332 |
+
with pytest.raises(err, match=msg):
|
| 333 |
+
index - "a"
|
| 334 |
+
with pytest.raises(err, match=msg):
|
| 335 |
+
index - index
|
| 336 |
+
with pytest.raises(err, match=msg):
|
| 337 |
+
index - index.tolist()
|
| 338 |
+
with pytest.raises(err, match=msg):
|
| 339 |
+
index.tolist() - index
|
| 340 |
+
|
| 341 |
+
def test_sub_object(self):
|
| 342 |
+
# GH#19369
|
| 343 |
+
index = pd.Index([Decimal(1), Decimal(2)])
|
| 344 |
+
expected = pd.Index([Decimal(0), Decimal(1)])
|
| 345 |
+
|
| 346 |
+
result = index - Decimal(1)
|
| 347 |
+
tm.assert_index_equal(result, expected)
|
| 348 |
+
|
| 349 |
+
result = index - pd.Index([Decimal(1), Decimal(1)])
|
| 350 |
+
tm.assert_index_equal(result, expected)
|
| 351 |
+
|
| 352 |
+
msg = "unsupported operand type"
|
| 353 |
+
with pytest.raises(TypeError, match=msg):
|
| 354 |
+
index - "foo"
|
| 355 |
+
|
| 356 |
+
with pytest.raises(TypeError, match=msg):
|
| 357 |
+
index - np.array([2, "foo"], dtype=object)
|
| 358 |
+
|
| 359 |
+
def test_rsub_object(self, fixed_now_ts):
|
| 360 |
+
# GH#19369
|
| 361 |
+
index = pd.Index([Decimal(1), Decimal(2)])
|
| 362 |
+
expected = pd.Index([Decimal(1), Decimal(0)])
|
| 363 |
+
|
| 364 |
+
result = Decimal(2) - index
|
| 365 |
+
tm.assert_index_equal(result, expected)
|
| 366 |
+
|
| 367 |
+
result = np.array([Decimal(2), Decimal(2)]) - index
|
| 368 |
+
tm.assert_index_equal(result, expected)
|
| 369 |
+
|
| 370 |
+
msg = "unsupported operand type"
|
| 371 |
+
with pytest.raises(TypeError, match=msg):
|
| 372 |
+
"foo" - index
|
| 373 |
+
|
| 374 |
+
with pytest.raises(TypeError, match=msg):
|
| 375 |
+
np.array([True, fixed_now_ts]) - index
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
class MyIndex(pd.Index):
|
| 379 |
+
# Simple index subclass that tracks ops calls.
|
| 380 |
+
|
| 381 |
+
_calls: int
|
| 382 |
+
|
| 383 |
+
@classmethod
|
| 384 |
+
def _simple_new(cls, values, name=None, dtype=None):
|
| 385 |
+
result = object.__new__(cls)
|
| 386 |
+
result._data = values
|
| 387 |
+
result._name = name
|
| 388 |
+
result._calls = 0
|
| 389 |
+
result._reset_identity()
|
| 390 |
+
|
| 391 |
+
return result
|
| 392 |
+
|
| 393 |
+
def __add__(self, other):
|
| 394 |
+
self._calls += 1
|
| 395 |
+
return self._simple_new(self._data)
|
| 396 |
+
|
| 397 |
+
def __radd__(self, other):
|
| 398 |
+
return self.__add__(other)
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
@pytest.mark.parametrize(
|
| 402 |
+
"other",
|
| 403 |
+
[
|
| 404 |
+
[datetime.timedelta(1), datetime.timedelta(2)],
|
| 405 |
+
[datetime.datetime(2000, 1, 1), datetime.datetime(2000, 1, 2)],
|
| 406 |
+
[pd.Period("2000"), pd.Period("2001")],
|
| 407 |
+
["a", "b"],
|
| 408 |
+
],
|
| 409 |
+
ids=["timedelta", "datetime", "period", "object"],
|
| 410 |
+
)
|
| 411 |
+
def test_index_ops_defer_to_unknown_subclasses(other):
|
| 412 |
+
# https://github.com/pandas-dev/pandas/issues/31109
|
| 413 |
+
values = np.array(
|
| 414 |
+
[datetime.date(2000, 1, 1), datetime.date(2000, 1, 2)], dtype=object
|
| 415 |
+
)
|
| 416 |
+
a = MyIndex._simple_new(values)
|
| 417 |
+
other = pd.Index(other)
|
| 418 |
+
result = other + a
|
| 419 |
+
assert isinstance(result, MyIndex)
|
| 420 |
+
assert a._calls == 1
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_period.py
ADDED
|
@@ -0,0 +1,1675 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Arithmetic tests for DataFrame/Series/Index/Array classes that should
|
| 2 |
+
# behave identically.
|
| 3 |
+
# Specifically for Period dtype
|
| 4 |
+
import operator
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from pandas._libs.tslibs import (
|
| 10 |
+
IncompatibleFrequency,
|
| 11 |
+
Period,
|
| 12 |
+
Timestamp,
|
| 13 |
+
to_offset,
|
| 14 |
+
)
|
| 15 |
+
from pandas.errors import PerformanceWarning
|
| 16 |
+
|
| 17 |
+
import pandas as pd
|
| 18 |
+
from pandas import (
|
| 19 |
+
PeriodIndex,
|
| 20 |
+
Series,
|
| 21 |
+
Timedelta,
|
| 22 |
+
TimedeltaIndex,
|
| 23 |
+
period_range,
|
| 24 |
+
)
|
| 25 |
+
import pandas._testing as tm
|
| 26 |
+
from pandas.core import ops
|
| 27 |
+
from pandas.core.arrays import TimedeltaArray
|
| 28 |
+
from pandas.tests.arithmetic.common import (
|
| 29 |
+
assert_invalid_addsub_type,
|
| 30 |
+
assert_invalid_comparison,
|
| 31 |
+
get_upcast_box,
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
_common_mismatch = [
|
| 35 |
+
pd.offsets.YearBegin(2),
|
| 36 |
+
pd.offsets.MonthBegin(1),
|
| 37 |
+
pd.offsets.Minute(),
|
| 38 |
+
]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@pytest.fixture(
|
| 42 |
+
params=[
|
| 43 |
+
Timedelta(minutes=30).to_pytimedelta(),
|
| 44 |
+
np.timedelta64(30, "s"),
|
| 45 |
+
Timedelta(seconds=30),
|
| 46 |
+
]
|
| 47 |
+
+ _common_mismatch
|
| 48 |
+
)
|
| 49 |
+
def not_hourly(request):
|
| 50 |
+
"""
|
| 51 |
+
Several timedelta-like and DateOffset instances that are _not_
|
| 52 |
+
compatible with Hourly frequencies.
|
| 53 |
+
"""
|
| 54 |
+
return request.param
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
@pytest.fixture(
|
| 58 |
+
params=[
|
| 59 |
+
np.timedelta64(365, "D"),
|
| 60 |
+
Timedelta(days=365).to_pytimedelta(),
|
| 61 |
+
Timedelta(days=365),
|
| 62 |
+
]
|
| 63 |
+
+ _common_mismatch
|
| 64 |
+
)
|
| 65 |
+
def mismatched_freq(request):
|
| 66 |
+
"""
|
| 67 |
+
Several timedelta-like and DateOffset instances that are _not_
|
| 68 |
+
compatible with Monthly or Annual frequencies.
|
| 69 |
+
"""
|
| 70 |
+
return request.param
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
# ------------------------------------------------------------------
|
| 74 |
+
# Comparisons
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class TestPeriodArrayLikeComparisons:
|
| 78 |
+
# Comparison tests for PeriodDtype vectors fully parametrized over
|
| 79 |
+
# DataFrame/Series/PeriodIndex/PeriodArray. Ideally all comparison
|
| 80 |
+
# tests will eventually end up here.
|
| 81 |
+
|
| 82 |
+
@pytest.mark.parametrize("other", ["2017", Period("2017", freq="D")])
|
| 83 |
+
def test_eq_scalar(self, other, box_with_array):
|
| 84 |
+
idx = PeriodIndex(["2017", "2017", "2018"], freq="D")
|
| 85 |
+
idx = tm.box_expected(idx, box_with_array)
|
| 86 |
+
xbox = get_upcast_box(idx, other, True)
|
| 87 |
+
|
| 88 |
+
expected = np.array([True, True, False])
|
| 89 |
+
expected = tm.box_expected(expected, xbox)
|
| 90 |
+
|
| 91 |
+
result = idx == other
|
| 92 |
+
|
| 93 |
+
tm.assert_equal(result, expected)
|
| 94 |
+
|
| 95 |
+
def test_compare_zerodim(self, box_with_array):
|
| 96 |
+
# GH#26689 make sure we unbox zero-dimensional arrays
|
| 97 |
+
|
| 98 |
+
pi = period_range("2000", periods=4)
|
| 99 |
+
other = np.array(pi.to_numpy()[0])
|
| 100 |
+
|
| 101 |
+
pi = tm.box_expected(pi, box_with_array)
|
| 102 |
+
xbox = get_upcast_box(pi, other, True)
|
| 103 |
+
|
| 104 |
+
result = pi <= other
|
| 105 |
+
expected = np.array([True, False, False, False])
|
| 106 |
+
expected = tm.box_expected(expected, xbox)
|
| 107 |
+
tm.assert_equal(result, expected)
|
| 108 |
+
|
| 109 |
+
@pytest.mark.parametrize(
|
| 110 |
+
"scalar",
|
| 111 |
+
[
|
| 112 |
+
"foo",
|
| 113 |
+
Timestamp("2021-01-01"),
|
| 114 |
+
Timedelta(days=4),
|
| 115 |
+
9,
|
| 116 |
+
9.5,
|
| 117 |
+
2000, # specifically don't consider 2000 to match Period("2000", "D")
|
| 118 |
+
False,
|
| 119 |
+
None,
|
| 120 |
+
],
|
| 121 |
+
)
|
| 122 |
+
def test_compare_invalid_scalar(self, box_with_array, scalar):
|
| 123 |
+
# GH#28980
|
| 124 |
+
# comparison with scalar that cannot be interpreted as a Period
|
| 125 |
+
pi = period_range("2000", periods=4)
|
| 126 |
+
parr = tm.box_expected(pi, box_with_array)
|
| 127 |
+
assert_invalid_comparison(parr, scalar, box_with_array)
|
| 128 |
+
|
| 129 |
+
@pytest.mark.parametrize(
|
| 130 |
+
"other",
|
| 131 |
+
[
|
| 132 |
+
pd.date_range("2000", periods=4).array,
|
| 133 |
+
pd.timedelta_range("1D", periods=4).array,
|
| 134 |
+
np.arange(4),
|
| 135 |
+
np.arange(4).astype(np.float64),
|
| 136 |
+
list(range(4)),
|
| 137 |
+
# match Period semantics by not treating integers as Periods
|
| 138 |
+
[2000, 2001, 2002, 2003],
|
| 139 |
+
np.arange(2000, 2004),
|
| 140 |
+
np.arange(2000, 2004).astype(object),
|
| 141 |
+
pd.Index([2000, 2001, 2002, 2003]),
|
| 142 |
+
],
|
| 143 |
+
)
|
| 144 |
+
def test_compare_invalid_listlike(self, box_with_array, other):
|
| 145 |
+
pi = period_range("2000", periods=4)
|
| 146 |
+
parr = tm.box_expected(pi, box_with_array)
|
| 147 |
+
assert_invalid_comparison(parr, other, box_with_array)
|
| 148 |
+
|
| 149 |
+
@pytest.mark.parametrize("other_box", [list, np.array, lambda x: x.astype(object)])
|
| 150 |
+
def test_compare_object_dtype(self, box_with_array, other_box):
|
| 151 |
+
pi = period_range("2000", periods=5)
|
| 152 |
+
parr = tm.box_expected(pi, box_with_array)
|
| 153 |
+
|
| 154 |
+
other = other_box(pi)
|
| 155 |
+
xbox = get_upcast_box(parr, other, True)
|
| 156 |
+
|
| 157 |
+
expected = np.array([True, True, True, True, True])
|
| 158 |
+
expected = tm.box_expected(expected, xbox)
|
| 159 |
+
|
| 160 |
+
result = parr == other
|
| 161 |
+
tm.assert_equal(result, expected)
|
| 162 |
+
result = parr <= other
|
| 163 |
+
tm.assert_equal(result, expected)
|
| 164 |
+
result = parr >= other
|
| 165 |
+
tm.assert_equal(result, expected)
|
| 166 |
+
|
| 167 |
+
result = parr != other
|
| 168 |
+
tm.assert_equal(result, ~expected)
|
| 169 |
+
result = parr < other
|
| 170 |
+
tm.assert_equal(result, ~expected)
|
| 171 |
+
result = parr > other
|
| 172 |
+
tm.assert_equal(result, ~expected)
|
| 173 |
+
|
| 174 |
+
other = other_box(pi[::-1])
|
| 175 |
+
|
| 176 |
+
expected = np.array([False, False, True, False, False])
|
| 177 |
+
expected = tm.box_expected(expected, xbox)
|
| 178 |
+
result = parr == other
|
| 179 |
+
tm.assert_equal(result, expected)
|
| 180 |
+
|
| 181 |
+
expected = np.array([True, True, True, False, False])
|
| 182 |
+
expected = tm.box_expected(expected, xbox)
|
| 183 |
+
result = parr <= other
|
| 184 |
+
tm.assert_equal(result, expected)
|
| 185 |
+
|
| 186 |
+
expected = np.array([False, False, True, True, True])
|
| 187 |
+
expected = tm.box_expected(expected, xbox)
|
| 188 |
+
result = parr >= other
|
| 189 |
+
tm.assert_equal(result, expected)
|
| 190 |
+
|
| 191 |
+
expected = np.array([True, True, False, True, True])
|
| 192 |
+
expected = tm.box_expected(expected, xbox)
|
| 193 |
+
result = parr != other
|
| 194 |
+
tm.assert_equal(result, expected)
|
| 195 |
+
|
| 196 |
+
expected = np.array([True, True, False, False, False])
|
| 197 |
+
expected = tm.box_expected(expected, xbox)
|
| 198 |
+
result = parr < other
|
| 199 |
+
tm.assert_equal(result, expected)
|
| 200 |
+
|
| 201 |
+
expected = np.array([False, False, False, True, True])
|
| 202 |
+
expected = tm.box_expected(expected, xbox)
|
| 203 |
+
result = parr > other
|
| 204 |
+
tm.assert_equal(result, expected)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
class TestPeriodIndexComparisons:
|
| 208 |
+
# TODO: parameterize over boxes
|
| 209 |
+
|
| 210 |
+
def test_pi_cmp_period(self):
|
| 211 |
+
idx = period_range("2007-01", periods=20, freq="M")
|
| 212 |
+
per = idx[10]
|
| 213 |
+
|
| 214 |
+
result = idx < per
|
| 215 |
+
exp = idx.values < idx.values[10]
|
| 216 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 217 |
+
|
| 218 |
+
# Tests Period.__richcmp__ against ndarray[object, ndim=2]
|
| 219 |
+
result = idx.values.reshape(10, 2) < per
|
| 220 |
+
tm.assert_numpy_array_equal(result, exp.reshape(10, 2))
|
| 221 |
+
|
| 222 |
+
# Tests Period.__richcmp__ against ndarray[object, ndim=0]
|
| 223 |
+
result = idx < np.array(per)
|
| 224 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 225 |
+
|
| 226 |
+
# TODO: moved from test_datetime64; de-duplicate with version below
|
| 227 |
+
def test_parr_cmp_period_scalar2(self, box_with_array):
|
| 228 |
+
pi = period_range("2000-01-01", periods=10, freq="D")
|
| 229 |
+
|
| 230 |
+
val = pi[3]
|
| 231 |
+
expected = [x > val for x in pi]
|
| 232 |
+
|
| 233 |
+
ser = tm.box_expected(pi, box_with_array)
|
| 234 |
+
xbox = get_upcast_box(ser, val, True)
|
| 235 |
+
|
| 236 |
+
expected = tm.box_expected(expected, xbox)
|
| 237 |
+
result = ser > val
|
| 238 |
+
tm.assert_equal(result, expected)
|
| 239 |
+
|
| 240 |
+
val = pi[5]
|
| 241 |
+
result = ser > val
|
| 242 |
+
expected = [x > val for x in pi]
|
| 243 |
+
expected = tm.box_expected(expected, xbox)
|
| 244 |
+
tm.assert_equal(result, expected)
|
| 245 |
+
|
| 246 |
+
@pytest.mark.parametrize("freq", ["M", "2M", "3M"])
|
| 247 |
+
def test_parr_cmp_period_scalar(self, freq, box_with_array):
|
| 248 |
+
# GH#13200
|
| 249 |
+
base = PeriodIndex(["2011-01", "2011-02", "2011-03", "2011-04"], freq=freq)
|
| 250 |
+
base = tm.box_expected(base, box_with_array)
|
| 251 |
+
per = Period("2011-02", freq=freq)
|
| 252 |
+
xbox = get_upcast_box(base, per, True)
|
| 253 |
+
|
| 254 |
+
exp = np.array([False, True, False, False])
|
| 255 |
+
exp = tm.box_expected(exp, xbox)
|
| 256 |
+
tm.assert_equal(base == per, exp)
|
| 257 |
+
tm.assert_equal(per == base, exp)
|
| 258 |
+
|
| 259 |
+
exp = np.array([True, False, True, True])
|
| 260 |
+
exp = tm.box_expected(exp, xbox)
|
| 261 |
+
tm.assert_equal(base != per, exp)
|
| 262 |
+
tm.assert_equal(per != base, exp)
|
| 263 |
+
|
| 264 |
+
exp = np.array([False, False, True, True])
|
| 265 |
+
exp = tm.box_expected(exp, xbox)
|
| 266 |
+
tm.assert_equal(base > per, exp)
|
| 267 |
+
tm.assert_equal(per < base, exp)
|
| 268 |
+
|
| 269 |
+
exp = np.array([True, False, False, False])
|
| 270 |
+
exp = tm.box_expected(exp, xbox)
|
| 271 |
+
tm.assert_equal(base < per, exp)
|
| 272 |
+
tm.assert_equal(per > base, exp)
|
| 273 |
+
|
| 274 |
+
exp = np.array([False, True, True, True])
|
| 275 |
+
exp = tm.box_expected(exp, xbox)
|
| 276 |
+
tm.assert_equal(base >= per, exp)
|
| 277 |
+
tm.assert_equal(per <= base, exp)
|
| 278 |
+
|
| 279 |
+
exp = np.array([True, True, False, False])
|
| 280 |
+
exp = tm.box_expected(exp, xbox)
|
| 281 |
+
tm.assert_equal(base <= per, exp)
|
| 282 |
+
tm.assert_equal(per >= base, exp)
|
| 283 |
+
|
| 284 |
+
@pytest.mark.parametrize("freq", ["M", "2M", "3M"])
|
| 285 |
+
def test_parr_cmp_pi(self, freq, box_with_array):
|
| 286 |
+
# GH#13200
|
| 287 |
+
base = PeriodIndex(["2011-01", "2011-02", "2011-03", "2011-04"], freq=freq)
|
| 288 |
+
base = tm.box_expected(base, box_with_array)
|
| 289 |
+
|
| 290 |
+
# TODO: could also box idx?
|
| 291 |
+
idx = PeriodIndex(["2011-02", "2011-01", "2011-03", "2011-05"], freq=freq)
|
| 292 |
+
|
| 293 |
+
xbox = get_upcast_box(base, idx, True)
|
| 294 |
+
|
| 295 |
+
exp = np.array([False, False, True, False])
|
| 296 |
+
exp = tm.box_expected(exp, xbox)
|
| 297 |
+
tm.assert_equal(base == idx, exp)
|
| 298 |
+
|
| 299 |
+
exp = np.array([True, True, False, True])
|
| 300 |
+
exp = tm.box_expected(exp, xbox)
|
| 301 |
+
tm.assert_equal(base != idx, exp)
|
| 302 |
+
|
| 303 |
+
exp = np.array([False, True, False, False])
|
| 304 |
+
exp = tm.box_expected(exp, xbox)
|
| 305 |
+
tm.assert_equal(base > idx, exp)
|
| 306 |
+
|
| 307 |
+
exp = np.array([True, False, False, True])
|
| 308 |
+
exp = tm.box_expected(exp, xbox)
|
| 309 |
+
tm.assert_equal(base < idx, exp)
|
| 310 |
+
|
| 311 |
+
exp = np.array([False, True, True, False])
|
| 312 |
+
exp = tm.box_expected(exp, xbox)
|
| 313 |
+
tm.assert_equal(base >= idx, exp)
|
| 314 |
+
|
| 315 |
+
exp = np.array([True, False, True, True])
|
| 316 |
+
exp = tm.box_expected(exp, xbox)
|
| 317 |
+
tm.assert_equal(base <= idx, exp)
|
| 318 |
+
|
| 319 |
+
@pytest.mark.parametrize("freq", ["M", "2M", "3M"])
|
| 320 |
+
def test_parr_cmp_pi_mismatched_freq(self, freq, box_with_array):
|
| 321 |
+
# GH#13200
|
| 322 |
+
# different base freq
|
| 323 |
+
base = PeriodIndex(["2011-01", "2011-02", "2011-03", "2011-04"], freq=freq)
|
| 324 |
+
base = tm.box_expected(base, box_with_array)
|
| 325 |
+
|
| 326 |
+
msg = rf"Invalid comparison between dtype=period\[{freq}\] and Period"
|
| 327 |
+
with pytest.raises(TypeError, match=msg):
|
| 328 |
+
base <= Period("2011", freq="Y")
|
| 329 |
+
|
| 330 |
+
with pytest.raises(TypeError, match=msg):
|
| 331 |
+
Period("2011", freq="Y") >= base
|
| 332 |
+
|
| 333 |
+
# TODO: Could parametrize over boxes for idx?
|
| 334 |
+
idx = PeriodIndex(["2011", "2012", "2013", "2014"], freq="Y")
|
| 335 |
+
rev_msg = r"Invalid comparison between dtype=period\[Y-DEC\] and PeriodArray"
|
| 336 |
+
idx_msg = rev_msg if box_with_array in [tm.to_array, pd.array] else msg
|
| 337 |
+
with pytest.raises(TypeError, match=idx_msg):
|
| 338 |
+
base <= idx
|
| 339 |
+
|
| 340 |
+
# Different frequency
|
| 341 |
+
msg = rf"Invalid comparison between dtype=period\[{freq}\] and Period"
|
| 342 |
+
with pytest.raises(TypeError, match=msg):
|
| 343 |
+
base <= Period("2011", freq="4M")
|
| 344 |
+
|
| 345 |
+
with pytest.raises(TypeError, match=msg):
|
| 346 |
+
Period("2011", freq="4M") >= base
|
| 347 |
+
|
| 348 |
+
idx = PeriodIndex(["2011", "2012", "2013", "2014"], freq="4M")
|
| 349 |
+
rev_msg = r"Invalid comparison between dtype=period\[4M\] and PeriodArray"
|
| 350 |
+
idx_msg = rev_msg if box_with_array in [tm.to_array, pd.array] else msg
|
| 351 |
+
with pytest.raises(TypeError, match=idx_msg):
|
| 352 |
+
base <= idx
|
| 353 |
+
|
| 354 |
+
@pytest.mark.parametrize("freq", ["M", "2M", "3M"])
|
| 355 |
+
def test_pi_cmp_nat(self, freq):
|
| 356 |
+
idx1 = PeriodIndex(["2011-01", "2011-02", "NaT", "2011-05"], freq=freq)
|
| 357 |
+
per = idx1[1]
|
| 358 |
+
|
| 359 |
+
result = idx1 > per
|
| 360 |
+
exp = np.array([False, False, False, True])
|
| 361 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 362 |
+
result = per < idx1
|
| 363 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 364 |
+
|
| 365 |
+
result = idx1 == pd.NaT
|
| 366 |
+
exp = np.array([False, False, False, False])
|
| 367 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 368 |
+
result = pd.NaT == idx1
|
| 369 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 370 |
+
|
| 371 |
+
result = idx1 != pd.NaT
|
| 372 |
+
exp = np.array([True, True, True, True])
|
| 373 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 374 |
+
result = pd.NaT != idx1
|
| 375 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 376 |
+
|
| 377 |
+
idx2 = PeriodIndex(["2011-02", "2011-01", "2011-04", "NaT"], freq=freq)
|
| 378 |
+
result = idx1 < idx2
|
| 379 |
+
exp = np.array([True, False, False, False])
|
| 380 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 381 |
+
|
| 382 |
+
result = idx1 == idx2
|
| 383 |
+
exp = np.array([False, False, False, False])
|
| 384 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 385 |
+
|
| 386 |
+
result = idx1 != idx2
|
| 387 |
+
exp = np.array([True, True, True, True])
|
| 388 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 389 |
+
|
| 390 |
+
result = idx1 == idx1
|
| 391 |
+
exp = np.array([True, True, False, True])
|
| 392 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 393 |
+
|
| 394 |
+
result = idx1 != idx1
|
| 395 |
+
exp = np.array([False, False, True, False])
|
| 396 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 397 |
+
|
| 398 |
+
@pytest.mark.parametrize("freq", ["M", "2M", "3M"])
|
| 399 |
+
def test_pi_cmp_nat_mismatched_freq_raises(self, freq):
|
| 400 |
+
idx1 = PeriodIndex(["2011-01", "2011-02", "NaT", "2011-05"], freq=freq)
|
| 401 |
+
|
| 402 |
+
diff = PeriodIndex(["2011-02", "2011-01", "2011-04", "NaT"], freq="4M")
|
| 403 |
+
msg = rf"Invalid comparison between dtype=period\[{freq}\] and PeriodArray"
|
| 404 |
+
with pytest.raises(TypeError, match=msg):
|
| 405 |
+
idx1 > diff
|
| 406 |
+
|
| 407 |
+
result = idx1 == diff
|
| 408 |
+
expected = np.array([False, False, False, False], dtype=bool)
|
| 409 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 410 |
+
|
| 411 |
+
# TODO: De-duplicate with test_pi_cmp_nat
|
| 412 |
+
@pytest.mark.parametrize("dtype", [object, None])
|
| 413 |
+
def test_comp_nat(self, dtype):
|
| 414 |
+
left = PeriodIndex([Period("2011-01-01"), pd.NaT, Period("2011-01-03")])
|
| 415 |
+
right = PeriodIndex([pd.NaT, pd.NaT, Period("2011-01-03")])
|
| 416 |
+
|
| 417 |
+
if dtype is not None:
|
| 418 |
+
left = left.astype(dtype)
|
| 419 |
+
right = right.astype(dtype)
|
| 420 |
+
|
| 421 |
+
result = left == right
|
| 422 |
+
expected = np.array([False, False, True])
|
| 423 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 424 |
+
|
| 425 |
+
result = left != right
|
| 426 |
+
expected = np.array([True, True, False])
|
| 427 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 428 |
+
|
| 429 |
+
expected = np.array([False, False, False])
|
| 430 |
+
tm.assert_numpy_array_equal(left == pd.NaT, expected)
|
| 431 |
+
tm.assert_numpy_array_equal(pd.NaT == right, expected)
|
| 432 |
+
|
| 433 |
+
expected = np.array([True, True, True])
|
| 434 |
+
tm.assert_numpy_array_equal(left != pd.NaT, expected)
|
| 435 |
+
tm.assert_numpy_array_equal(pd.NaT != left, expected)
|
| 436 |
+
|
| 437 |
+
expected = np.array([False, False, False])
|
| 438 |
+
tm.assert_numpy_array_equal(left < pd.NaT, expected)
|
| 439 |
+
tm.assert_numpy_array_equal(pd.NaT > left, expected)
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
class TestPeriodSeriesComparisons:
    def test_cmp_series_period_series_mixed_freq(self):
        # GH#13200
        # Elementwise comparison between two object-dtype Series of Periods
        # with mixed (but pairwise-matching) frequencies.
        base = Series(
            [
                Period("2011", freq="Y"),
                Period("2011-02", freq="M"),
                Period("2013", freq="Y"),
                Period("2011-04", freq="M"),
            ]
        )

        ser = Series(
            [
                Period("2012", freq="Y"),
                Period("2011-01", freq="M"),
                Period("2013", freq="Y"),
                Period("2011-05", freq="M"),
            ]
        )

        cases = [
            (operator.eq, [False, False, True, False]),
            (operator.ne, [True, True, False, True]),
            (operator.gt, [False, True, False, False]),
            (operator.lt, [True, False, False, True]),
            (operator.ge, [False, True, True, False]),
            (operator.le, [True, False, True, True]),
        ]
        for op, raw in cases:
            tm.assert_series_equal(op(base, ser), Series(raw))
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
class TestPeriodIndexSeriesComparisonConsistency:
    """Test PeriodIndex and Period Series Ops consistency"""

    # TODO: needs parametrization+de-duplication

    def _check(self, values, func, expected):
        # Apply ``func`` to a PeriodIndex built from ``values`` and to the
        # equivalent Series, verifying both agree with ``expected``.

        # check that we don't pass an unwanted type to tm.assert_equal
        assert isinstance(expected, (pd.Index, np.ndarray))

        tm.assert_equal(func(PeriodIndex(values)), expected)

        tm.assert_series_equal(
            func(Series(values)), Series(expected, name=values.name)
        )

    def test_pi_comp_period(self):
        idx = PeriodIndex(
            ["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
        )
        per = idx[2]

        # Each case pairs a comparison against the scalar Period with its
        # expected boolean mask (both operand orders are exercised).
        cases = [
            (lambda x: x == per, [False, False, True, False]),
            (lambda x: per == x, [False, False, True, False]),
            (lambda x: x != per, [True, True, False, True]),
            (lambda x: per != x, [True, True, False, True]),
            (lambda x: per >= x, [True, True, True, False]),
            (lambda x: x > per, [False, False, False, True]),
            (lambda x: per >= x, [True, True, True, False]),
        ]
        for func, raw in cases:
            self._check(idx, func, np.array(raw, dtype=np.bool_))

    def test_pi_comp_period_nat(self):
        idx = PeriodIndex(
            ["2011-01", "NaT", "2011-03", "2011-04"], freq="M", name="idx"
        )
        per = idx[2]

        # NaT slots compare False for every operator except ``!=``.
        cases = [
            (lambda x: x == per, [False, False, True, False]),
            (lambda x: per == x, [False, False, True, False]),
            (lambda x: x == pd.NaT, [False, False, False, False]),
            (lambda x: pd.NaT == x, [False, False, False, False]),
            (lambda x: x != per, [True, True, False, True]),
            (lambda x: per != x, [True, True, False, True]),
            (lambda x: x != pd.NaT, [True, True, True, True]),
            (lambda x: pd.NaT != x, [True, True, True, True]),
            (lambda x: per >= x, [True, False, True, False]),
            (lambda x: x < per, [True, False, False, False]),
            (lambda x: x > pd.NaT, [False, False, False, False]),
            (lambda x: pd.NaT >= x, [False, False, False, False]),
        ]
        for func, raw in cases:
            self._check(idx, func, np.array(raw, dtype=np.bool_))
|
| 578 |
+
|
| 579 |
+
|
| 580 |
+
# ------------------------------------------------------------------
|
| 581 |
+
# Arithmetic
|
| 582 |
+
|
| 583 |
+
|
| 584 |
+
class TestPeriodFrameArithmetic:
    def test_ops_frame_period(self):
        # GH#13043
        df = pd.DataFrame(
            {
                "A": [Period("2015-01", freq="M"), Period("2015-02", freq="M")],
                "B": [Period("2014-01", freq="M"), Period("2014-02", freq="M")],
            }
        )
        assert df["A"].dtype == "Period[M]"
        assert df["B"].dtype == "Period[M]"

        per = Period("2015-03", freq="M")
        off = per.freq

        def offset_frame(a_counts, b_counts):
            # Expected frames hold DateOffset multiples; dtype is object
            # because the original columns were Period dtype.
            return pd.DataFrame(
                {
                    "A": np.array([n * off for n in a_counts], dtype=object),
                    "B": np.array([n * off for n in b_counts], dtype=object),
                }
            )

        exp = offset_frame([2, 1], [14, 13])
        tm.assert_frame_equal(per - df, exp)
        tm.assert_frame_equal(df - per, -1 * exp)

        df2 = pd.DataFrame(
            {
                "A": [Period("2015-05", freq="M"), Period("2015-06", freq="M")],
                "B": [Period("2015-05", freq="M"), Period("2015-06", freq="M")],
            }
        )
        assert df2["A"].dtype == "Period[M]"
        assert df2["B"].dtype == "Period[M]"

        exp = offset_frame([4, 4], [16, 16])
        tm.assert_frame_equal(df2 - df, exp)
        tm.assert_frame_equal(df - df2, -1 * exp)
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
class TestPeriodIndexArithmetic:
|
| 628 |
+
# ---------------------------------------------------------------
|
| 629 |
+
# __add__/__sub__ with PeriodIndex
|
| 630 |
+
# PeriodIndex + other is defined for integers and timedelta-like others
|
| 631 |
+
# PeriodIndex - other is defined for integers, timedelta-like others,
|
| 632 |
+
# and PeriodIndex (with matching freq)
|
| 633 |
+
|
| 634 |
+
def test_parr_add_iadd_parr_raises(self, box_with_array):
    # Adding two Period arrays is undefined; both the binary and the
    # in-place forms must raise.  An earlier implementation performed a
    # set union instead; see GH#14164 and GH#13077 for the history.
    # TODO: parametrize over boxes for other?
    left = tm.box_expected(
        period_range("1/1/2000", freq="D", periods=5), box_with_array
    )
    right = period_range("1/6/2000", freq="D", periods=5)

    msg = r"unsupported operand type\(s\) for \+: .* and .*"
    with pytest.raises(TypeError, match=msg):
        left + right

    with pytest.raises(TypeError, match=msg):
        left += right
|
| 650 |
+
|
| 651 |
+
def test_pi_sub_isub_pi(self):
|
| 652 |
+
# GH#20049
|
| 653 |
+
# For historical reference see GH#14164, GH#13077.
|
| 654 |
+
# PeriodIndex subtraction originally performed set difference,
|
| 655 |
+
# then changed to raise TypeError before being implemented in GH#20049
|
| 656 |
+
rng = period_range("1/1/2000", freq="D", periods=5)
|
| 657 |
+
other = period_range("1/6/2000", freq="D", periods=5)
|
| 658 |
+
|
| 659 |
+
off = rng.freq
|
| 660 |
+
expected = pd.Index([-5 * off] * 5)
|
| 661 |
+
result = rng - other
|
| 662 |
+
tm.assert_index_equal(result, expected)
|
| 663 |
+
|
| 664 |
+
rng -= other
|
| 665 |
+
tm.assert_index_equal(rng, expected)
|
| 666 |
+
|
| 667 |
+
def test_pi_sub_pi_with_nat(self):
|
| 668 |
+
rng = period_range("1/1/2000", freq="D", periods=5)
|
| 669 |
+
other = rng[1:].insert(0, pd.NaT)
|
| 670 |
+
assert other[1:].equals(rng[1:])
|
| 671 |
+
|
| 672 |
+
result = rng - other
|
| 673 |
+
off = rng.freq
|
| 674 |
+
expected = pd.Index([pd.NaT, 0 * off, 0 * off, 0 * off, 0 * off])
|
| 675 |
+
tm.assert_index_equal(result, expected)
|
| 676 |
+
|
| 677 |
+
def test_parr_sub_pi_mismatched_freq(self, box_with_array, box_with_array2):
    # Subtraction of period arrays with different frequencies must raise.
    pi = tm.box_expected(
        period_range("1/1/2000", freq="D", periods=5), box_with_array
    )
    other = tm.box_expected(
        period_range("1/6/2000", freq="h", periods=5), box_with_array2
    )

    msg = r"Input has different freq=[hD] from PeriodArray\(freq=[Dh]\)"
    with pytest.raises(IncompatibleFrequency, match=msg):
        pi - other
|
| 686 |
+
|
| 687 |
+
@pytest.mark.parametrize("n", [1, 2, 3, 4])
def test_sub_n_gt_1_ticks(self, tick_classes, n):
    # GH 23878: subtraction with n > 1 tick frequencies must match the
    # result computed with the corresponding base (n == 1) frequency.
    start, end = "19910905", "19920406"
    p1 = PeriodIndex([start], freq=tick_classes(n))
    p2 = PeriodIndex([end], freq=tick_classes(n))

    expected = PeriodIndex([end], freq=p2.freq.base) - PeriodIndex(
        [start], freq=p1.freq.base
    )

    tm.assert_index_equal(p2 - p1, expected)
|
| 700 |
+
|
| 701 |
+
@pytest.mark.parametrize("n", [1, 2, 3, 4])
@pytest.mark.parametrize(
    "offset, kwd_name",
    [
        (pd.offsets.YearEnd, "month"),
        (pd.offsets.QuarterEnd, "startingMonth"),
        (pd.offsets.MonthEnd, None),
        (pd.offsets.Week, "weekday"),
    ],
)
def test_sub_n_gt_1_offsets(self, offset, kwd_name, n):
    # GH 23878: like test_sub_n_gt_1_ticks, but for anchored offsets.
    kwds = {} if kwd_name is None else {kwd_name: 3}
    start, end = "19910905", "19920406"
    freq = offset(n, normalize=False, **kwds)

    result = PeriodIndex([end], freq=freq) - PeriodIndex([start], freq=freq)
    expected = PeriodIndex([end], freq=freq.base) - PeriodIndex(
        [start], freq=freq.base
    )

    tm.assert_index_equal(result, expected)
|
| 726 |
+
|
| 727 |
+
# -------------------------------------------------------------
|
| 728 |
+
# Invalid Operations
|
| 729 |
+
|
| 730 |
+
@pytest.mark.parametrize(
    "other",
    [
        # datetime scalars
        Timestamp("2016-01-01"),
        Timestamp("2016-01-01").to_pydatetime(),
        Timestamp("2016-01-01").to_datetime64(),
        # datetime-like arrays
        pd.date_range("2016-01-01", periods=3, freq="h"),
        pd.date_range("2016-01-01", periods=3, tz="Europe/Brussels"),
        pd.date_range("2016-01-01", periods=3, freq="s")._data,
        pd.date_range("2016-01-01", periods=3, tz="Asia/Tokyo")._data,
        # Miscellaneous invalid types
        3.14,
        np.array([2.0, 3.0, 4.0]),
    ],
)
def test_parr_add_sub_invalid(self, other, box_with_array):
    # GH#23215: datetime scalars/arrays and miscellaneous invalid types
    # cannot be added to or subtracted from a Period array, in either
    # operand order.
    parr = tm.box_expected(
        period_range("1/1/2000", freq="D", periods=3), box_with_array
    )

    msg = "|".join(
        [
            r"(:?cannot add PeriodArray and .*)",
            r"(:?cannot subtract .* from (:?a\s)?.*)",
            r"(:?unsupported operand type\(s\) for \+: .* and .*)",
            r"unsupported operand type\(s\) for [+-]: .* and .*",
        ]
    )
    assert_invalid_addsub_type(parr, other, msg)
    for op in (operator.add, operator.sub):
        with pytest.raises(TypeError, match=msg):
            op(parr, other)
        with pytest.raises(TypeError, match=msg):
            op(other, parr)
|
| 769 |
+
|
| 770 |
+
# -----------------------------------------------------------------
|
| 771 |
+
# __add__/__sub__ with ndarray[datetime64] and ndarray[timedelta64]
|
| 772 |
+
|
| 773 |
+
def test_pi_add_sub_td64_array_non_tick_raises(self):
    # A quarterly (non-tick) frequency cannot absorb a timedelta64 array.
    pi = period_range("1/1/2000", freq="Q", periods=3)
    tdarr = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"]).values

    msg = r"Cannot add or subtract timedelta64\[ns\] dtype from period\[Q-DEC\]"
    with pytest.raises(TypeError, match=msg):
        pi + tdarr
    with pytest.raises(TypeError, match=msg):
        tdarr + pi

    with pytest.raises(TypeError, match=msg):
        pi - tdarr
    # The reflected subtraction raises from the timedelta side.
    msg = r"cannot subtract PeriodArray from TimedeltaArray"
    with pytest.raises(TypeError, match=msg):
        tdarr - pi
|
| 789 |
+
|
| 790 |
+
def test_pi_add_sub_td64_array_tick(self):
    # PeriodIndex + Timedelta-like is allowed only with tick-like
    # frequencies; both Index and raw-ndarray forms behave the same.
    pi = period_range("1/1/2000", freq="90D", periods=3)
    tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"])
    tdarr = tdi.values

    expected = period_range("12/31/1999", freq="90D", periods=3)
    for summed in (pi + tdi, pi + tdarr, tdi + pi, tdarr + pi):
        tm.assert_index_equal(summed, expected)

    expected = period_range("1/2/2000", freq="90D", periods=3)

    tm.assert_index_equal(pi - tdi, expected)
    tm.assert_index_equal(pi - tdarr, expected)

    # Reflected subtraction is never defined.
    msg = r"cannot subtract .* from .*"
    with pytest.raises(TypeError, match=msg):
        tdarr - pi

    with pytest.raises(TypeError, match=msg):
        tdi - pi
|
| 820 |
+
|
| 821 |
+
@pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"])
@pytest.mark.parametrize("tdi_freq", [None, "h"])
def test_parr_sub_td64array(self, box_with_array, tdi_freq, pi_freq):
    box = box_with_array
    xbox = pd.Index if box in [pd.array, tm.to_array] else box

    tdi = TimedeltaIndex(["1 hours", "2 hours"], freq=tdi_freq)
    pi = (Timestamp("2018-03-07 17:16:40") + tdi).to_period(pi_freq)

    # TODO: parametrize over box for pi?
    td64obj = tm.box_expected(tdi, box)

    if pi_freq == "h":
        # Compatible tick frequency: subtraction round-trips via timestamps.
        expected = (pi.to_timestamp("s") - tdi).to_period(pi_freq)
        tm.assert_equal(pi - td64obj, tm.box_expected(expected, xbox))

        # Subtract from scalar
        expected = (pi[0].to_timestamp("s") - tdi).to_period(pi_freq)
        tm.assert_equal(pi[0] - td64obj, tm.box_expected(expected, box))

    elif pi_freq == "D":
        # Tick, but non-compatible
        msg = (
            "Cannot add/subtract timedelta-like from PeriodArray that is "
            "not an integer multiple of the PeriodArray's freq."
        )
        with pytest.raises(IncompatibleFrequency, match=msg):
            pi - td64obj

        with pytest.raises(IncompatibleFrequency, match=msg):
            pi[0] - td64obj

    else:
        # With non-Tick freq, we could not add timedelta64 array regardless
        # of what its resolution is
        msg = "Cannot add or subtract timedelta64"
        with pytest.raises(TypeError, match=msg):
            pi - td64obj
        with pytest.raises(TypeError, match=msg):
            pi[0] - td64obj
|
| 866 |
+
|
| 867 |
+
# -----------------------------------------------------------------
|
| 868 |
+
# operations with array/Index of DateOffset objects
|
| 869 |
+
|
| 870 |
+
@pytest.mark.parametrize("box", [np.array, pd.Index])
def test_pi_add_offset_array(self, box):
    # GH#18849: adding an array of anchored offsets works elementwise in
    # both operand orders, emitting a PerformanceWarning and returning
    # object dtype.
    pi = PeriodIndex([Period("2015Q1"), Period("2016Q2")])
    offs = box(
        [
            pd.offsets.QuarterEnd(n=1, startingMonth=12),
            pd.offsets.QuarterEnd(n=-2, startingMonth=12),
        ]
    )
    expected = PeriodIndex([Period("2015Q2"), Period("2015Q4")]).astype(object)

    with tm.assert_produces_warning(PerformanceWarning):
        result = pi + offs
    tm.assert_index_equal(result, expected)

    with tm.assert_produces_warning(PerformanceWarning):
        result = offs + pi
    tm.assert_index_equal(result, expected)

    # addition/subtraction ops with incompatible offsets should issue
    # a PerformanceWarning and _then_ raise a TypeError.
    unanchored = np.array([pd.offsets.Hour(n=1), pd.offsets.Minute(n=-2)])
    msg = r"Input cannot be converted to Period\(freq=Q-DEC\)"
    with pytest.raises(IncompatibleFrequency, match=msg):
        with tm.assert_produces_warning(PerformanceWarning):
            pi + unanchored
    with pytest.raises(IncompatibleFrequency, match=msg):
        with tm.assert_produces_warning(PerformanceWarning):
            unanchored + pi
|
| 900 |
+
|
| 901 |
+
@pytest.mark.parametrize("box", [np.array, pd.Index])
def test_pi_sub_offset_array(self, box):
    # GH#18824: subtracting an array of anchored offsets works
    # elementwise, emitting a PerformanceWarning and returning object
    # dtype.
    pi = PeriodIndex([Period("2015Q1"), Period("2016Q2")])
    other = box(
        [
            pd.offsets.QuarterEnd(n=1, startingMonth=12),
            pd.offsets.QuarterEnd(n=-2, startingMonth=12),
        ]
    )

    expected = PeriodIndex(
        [pi[n] - other[n] for n in range(len(pi))]
    ).astype(object)

    with tm.assert_produces_warning(PerformanceWarning):
        result = pi - other
    tm.assert_index_equal(result, expected)

    anchored = box([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])

    # addition/subtraction ops with anchored offsets should issue
    # a PerformanceWarning and _then_ raise a TypeError.
    msg = r"Input has different freq=-1M from Period\(freq=Q-DEC\)"
    with pytest.raises(IncompatibleFrequency, match=msg):
        with tm.assert_produces_warning(PerformanceWarning):
            pi - anchored
    with pytest.raises(IncompatibleFrequency, match=msg):
        with tm.assert_produces_warning(PerformanceWarning):
            anchored - pi
|
| 930 |
+
|
| 931 |
+
def test_pi_add_iadd_int(self, one):
|
| 932 |
+
# Variants of `one` for #19012
|
| 933 |
+
rng = period_range("2000-01-01 09:00", freq="h", periods=10)
|
| 934 |
+
result = rng + one
|
| 935 |
+
expected = period_range("2000-01-01 10:00", freq="h", periods=10)
|
| 936 |
+
tm.assert_index_equal(result, expected)
|
| 937 |
+
rng += one
|
| 938 |
+
tm.assert_index_equal(rng, expected)
|
| 939 |
+
|
| 940 |
+
def test_pi_sub_isub_int(self, one):
|
| 941 |
+
"""
|
| 942 |
+
PeriodIndex.__sub__ and __isub__ with several representations of
|
| 943 |
+
the integer 1, e.g. int, np.int64, np.uint8, ...
|
| 944 |
+
"""
|
| 945 |
+
rng = period_range("2000-01-01 09:00", freq="h", periods=10)
|
| 946 |
+
result = rng - one
|
| 947 |
+
expected = period_range("2000-01-01 08:00", freq="h", periods=10)
|
| 948 |
+
tm.assert_index_equal(result, expected)
|
| 949 |
+
rng -= one
|
| 950 |
+
tm.assert_index_equal(rng, expected)
|
| 951 |
+
|
| 952 |
+
@pytest.mark.parametrize("five", [5, np.array(5, dtype=np.int64)])
def test_pi_sub_intlike(self, five):
    # Integer subtraction must agree with adding the negated integer.
    rng = period_range("2007-01", periods=50)

    tm.assert_index_equal(rng - five, rng + (-five))
|
| 959 |
+
|
| 960 |
+
def test_pi_add_sub_int_array_freqn_gt1(self):
|
| 961 |
+
# GH#47209 test adding array of ints when freq.n > 1 matches
|
| 962 |
+
# scalar behavior
|
| 963 |
+
pi = period_range("2016-01-01", periods=10, freq="2D")
|
| 964 |
+
arr = np.arange(10)
|
| 965 |
+
result = pi + arr
|
| 966 |
+
expected = pd.Index([x + y for x, y in zip(pi, arr)])
|
| 967 |
+
tm.assert_index_equal(result, expected)
|
| 968 |
+
|
| 969 |
+
result = pi - arr
|
| 970 |
+
expected = pd.Index([x - y for x, y in zip(pi, arr)])
|
| 971 |
+
tm.assert_index_equal(result, expected)
|
| 972 |
+
|
| 973 |
+
def test_pi_sub_isub_offset(self):
|
| 974 |
+
# offset
|
| 975 |
+
# DateOffset
|
| 976 |
+
rng = period_range("2014", "2024", freq="Y")
|
| 977 |
+
result = rng - pd.offsets.YearEnd(5)
|
| 978 |
+
expected = period_range("2009", "2019", freq="Y")
|
| 979 |
+
tm.assert_index_equal(result, expected)
|
| 980 |
+
rng -= pd.offsets.YearEnd(5)
|
| 981 |
+
tm.assert_index_equal(rng, expected)
|
| 982 |
+
|
| 983 |
+
rng = period_range("2014-01", "2016-12", freq="M")
|
| 984 |
+
result = rng - pd.offsets.MonthEnd(5)
|
| 985 |
+
expected = period_range("2013-08", "2016-07", freq="M")
|
| 986 |
+
tm.assert_index_equal(result, expected)
|
| 987 |
+
|
| 988 |
+
rng -= pd.offsets.MonthEnd(5)
|
| 989 |
+
tm.assert_index_equal(rng, expected)
|
| 990 |
+
|
| 991 |
+
@pytest.mark.parametrize("transpose", [True, False])
def test_pi_add_offset_n_gt1(self, box_with_array, transpose):
    # GH#23215
    # Adding the index's own freq offset (with n > 1) advances each
    # element by one full n-month period.
    per = Period("2016-01", freq="2M")
    pi = tm.box_expected(PeriodIndex([per]), box_with_array, transpose=transpose)
    expected = tm.box_expected(
        PeriodIndex(["2016-03"], freq="2M"), box_with_array, transpose=transpose
    )

    tm.assert_equal(pi + per.freq, expected)

    tm.assert_equal(per.freq + pi, expected)
|
| 1009 |
+
|
| 1010 |
+
def test_pi_add_offset_n_gt1_not_divisible(self, box_with_array):
|
| 1011 |
+
# GH#23215
|
| 1012 |
+
# PeriodIndex with freq.n > 1 add offset with offset.n % freq.n != 0
|
| 1013 |
+
pi = PeriodIndex(["2016-01"], freq="2M")
|
| 1014 |
+
expected = PeriodIndex(["2016-04"], freq="2M")
|
| 1015 |
+
|
| 1016 |
+
pi = tm.box_expected(pi, box_with_array)
|
| 1017 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1018 |
+
|
| 1019 |
+
result = pi + to_offset("3ME")
|
| 1020 |
+
tm.assert_equal(result, expected)
|
| 1021 |
+
|
| 1022 |
+
result = to_offset("3ME") + pi
|
| 1023 |
+
tm.assert_equal(result, expected)
|
| 1024 |
+
|
| 1025 |
+
# ---------------------------------------------------------------
|
| 1026 |
+
# __add__/__sub__ with integer arrays
|
| 1027 |
+
|
| 1028 |
+
@pytest.mark.parametrize("int_holder", [np.array, pd.Index])
@pytest.mark.parametrize("op", [operator.add, ops.radd])
def test_pi_add_intarray(self, int_holder, op):
    # GH#19959: adding an integer array shifts elementwise; NaT passes
    # through unchanged.
    pi = PeriodIndex([Period("2015Q1"), Period("NaT")])
    other = int_holder([4, -1])

    expected = PeriodIndex([Period("2016Q1"), Period("NaT")])
    tm.assert_index_equal(op(pi, other), expected)
|
| 1038 |
+
|
| 1039 |
+
@pytest.mark.parametrize("int_holder", [np.array, pd.Index])
def test_pi_sub_intarray(self, int_holder):
    # GH#19959: subtracting an integer array shifts elementwise; the
    # reflected operation (ints - periods) is undefined and must raise.
    pi = PeriodIndex([Period("2015Q1"), Period("NaT")])
    other = int_holder([4, -1])

    expected = PeriodIndex([Period("2014Q1"), Period("NaT")])
    tm.assert_index_equal(pi - other, expected)

    msg = r"bad operand type for unary -: 'PeriodArray'"
    with pytest.raises(TypeError, match=msg):
        other - pi
|
| 1052 |
+
|
| 1053 |
+
# ---------------------------------------------------------------
|
| 1054 |
+
# Timedelta-like (timedelta, timedelta64, Timedelta, Tick)
|
| 1055 |
+
# TODO: Some of these are misnomers because of non-Tick DateOffsets
|
| 1056 |
+
|
| 1057 |
+
def test_parr_add_timedeltalike_minute_gt1(self, three_days, box_with_array):
    # GH#23031 adding a time-delta-like offset to a PeriodArray that has
    # minute frequency with n != 1.  A more general case is tested below
    # in test_pi_add_timedeltalike_tick_gt1, but here we write out the
    # expected result more explicitly.
    other = three_days
    parr = tm.box_expected(
        period_range("2014-05-01", periods=3, freq="2D"), box_with_array
    )

    expected = tm.box_expected(
        PeriodIndex(["2014-05-04", "2014-05-06", "2014-05-08"], freq="2D"),
        box_with_array,
    )

    tm.assert_equal(parr + other, expected)

    tm.assert_equal(other + parr, expected)

    # subtraction
    expected = tm.box_expected(
        PeriodIndex(["2014-04-28", "2014-04-30", "2014-05-02"], freq="2D"),
        box_with_array,
    )
    tm.assert_equal(parr - other, expected)

    msg = "|".join(
        [
            r"bad operand type for unary -: 'PeriodArray'",
            r"cannot subtract PeriodArray from timedelta64\[[hD]\]",
        ]
    )
    with pytest.raises(TypeError, match=msg):
        other - parr
|
| 1089 |
+
|
| 1090 |
+
@pytest.mark.parametrize("freqstr", ["5ns", "5us", "5ms", "5s", "5min", "5h", "5d"])
def test_parr_add_timedeltalike_tick_gt1(self, three_days, freqstr, box_with_array):
    # GH#23031 adding a time-delta-like offset to a PeriodArray that has
    # tick-like frequency with n != 1
    other = three_days
    rng = period_range("2014-05-01", periods=6, freq=freqstr)
    first = rng[0]
    parr = tm.box_expected(rng, box_with_array)

    expected = tm.box_expected(
        period_range(first + other, periods=6, freq=freqstr), box_with_array
    )

    tm.assert_equal(parr + other, expected)

    tm.assert_equal(other + parr, expected)

    # subtraction
    expected = tm.box_expected(
        period_range(first - other, periods=6, freq=freqstr), box_with_array
    )
    tm.assert_equal(parr - other, expected)
    msg = "|".join(
        [
            r"bad operand type for unary -: 'PeriodArray'",
            r"cannot subtract PeriodArray from timedelta64\[[hD]\]",
        ]
    )
    with pytest.raises(TypeError, match=msg):
        other - parr
|
| 1121 |
+
|
| 1122 |
+
def test_pi_add_iadd_timedeltalike_daily(self, three_days):
|
| 1123 |
+
# Tick
|
| 1124 |
+
other = three_days
|
| 1125 |
+
rng = period_range("2014-05-01", "2014-05-15", freq="D")
|
| 1126 |
+
expected = period_range("2014-05-04", "2014-05-18", freq="D")
|
| 1127 |
+
|
| 1128 |
+
result = rng + other
|
| 1129 |
+
tm.assert_index_equal(result, expected)
|
| 1130 |
+
|
| 1131 |
+
rng += other
|
| 1132 |
+
tm.assert_index_equal(rng, expected)
|
| 1133 |
+
|
| 1134 |
+
def test_pi_sub_isub_timedeltalike_daily(self, three_days):
|
| 1135 |
+
# Tick-like 3 Days
|
| 1136 |
+
other = three_days
|
| 1137 |
+
rng = period_range("2014-05-01", "2014-05-15", freq="D")
|
| 1138 |
+
expected = period_range("2014-04-28", "2014-05-12", freq="D")
|
| 1139 |
+
|
| 1140 |
+
result = rng - other
|
| 1141 |
+
tm.assert_index_equal(result, expected)
|
| 1142 |
+
|
| 1143 |
+
rng -= other
|
| 1144 |
+
tm.assert_index_equal(rng, expected)
|
| 1145 |
+
|
| 1146 |
+
def test_parr_add_sub_timedeltalike_freq_mismatch_daily(
    self, not_daily, box_with_array
):
    # Anything that is not a whole multiple of one day must raise for all
    # four of +, +=, -, -=.
    other = not_daily
    parr = tm.box_expected(
        period_range("2014-05-01", "2014-05-15", freq="D"), box_with_array
    )

    msg = "|".join(
        [
            # non-timedelta-like DateOffset
            "Input has different freq(=.+)? from Period.*?\\(freq=D\\)",
            # timedelta/td64/Timedelta but not a multiple of 24H
            "Cannot add/subtract timedelta-like from PeriodArray that is "
            "not an integer multiple of the PeriodArray's freq.",
        ]
    )
    with pytest.raises(IncompatibleFrequency, match=msg):
        parr + other
    with pytest.raises(IncompatibleFrequency, match=msg):
        parr += other
    with pytest.raises(IncompatibleFrequency, match=msg):
        parr - other
    with pytest.raises(IncompatibleFrequency, match=msg):
        parr -= other
|
| 1170 |
+
|
| 1171 |
+
def test_pi_add_iadd_timedeltalike_hourly(self, two_hours):
|
| 1172 |
+
other = two_hours
|
| 1173 |
+
rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h")
|
| 1174 |
+
expected = period_range("2014-01-01 12:00", "2014-01-05 12:00", freq="h")
|
| 1175 |
+
|
| 1176 |
+
result = rng + other
|
| 1177 |
+
tm.assert_index_equal(result, expected)
|
| 1178 |
+
|
| 1179 |
+
rng += other
|
| 1180 |
+
tm.assert_index_equal(rng, expected)
|
| 1181 |
+
|
| 1182 |
+
def test_parr_add_timedeltalike_mismatched_freq_hourly(
    self, not_hourly, box_with_array
):
    # Anything that is not a whole multiple of one hour must raise for
    # both + and +=.
    other = not_hourly
    parr = tm.box_expected(
        period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h"),
        box_with_array,
    )
    msg = "|".join(
        [
            # non-timedelta-like DateOffset
            "Input has different freq(=.+)? from Period.*?\\(freq=h\\)",
            # timedelta/td64/Timedelta but not a multiple of 24H
            "Cannot add/subtract timedelta-like from PeriodArray that is "
            "not an integer multiple of the PeriodArray's freq.",
        ]
    )

    with pytest.raises(IncompatibleFrequency, match=msg):
        parr + other

    with pytest.raises(IncompatibleFrequency, match=msg):
        parr += other
|
| 1203 |
+
|
| 1204 |
+
def test_pi_sub_isub_timedeltalike_hourly(self, two_hours):
|
| 1205 |
+
other = two_hours
|
| 1206 |
+
rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h")
|
| 1207 |
+
expected = period_range("2014-01-01 08:00", "2014-01-05 08:00", freq="h")
|
| 1208 |
+
|
| 1209 |
+
result = rng - other
|
| 1210 |
+
tm.assert_index_equal(result, expected)
|
| 1211 |
+
|
| 1212 |
+
rng -= other
|
| 1213 |
+
tm.assert_index_equal(rng, expected)
|
| 1214 |
+
|
| 1215 |
+
def test_add_iadd_timedeltalike_annual(self):
|
| 1216 |
+
# offset
|
| 1217 |
+
# DateOffset
|
| 1218 |
+
rng = period_range("2014", "2024", freq="Y")
|
| 1219 |
+
result = rng + pd.offsets.YearEnd(5)
|
| 1220 |
+
expected = period_range("2019", "2029", freq="Y")
|
| 1221 |
+
tm.assert_index_equal(result, expected)
|
| 1222 |
+
rng += pd.offsets.YearEnd(5)
|
| 1223 |
+
tm.assert_index_equal(rng, expected)
|
| 1224 |
+
|
| 1225 |
+
def test_pi_add_sub_timedeltalike_freq_mismatch_annual(self, mismatched_freq):
|
| 1226 |
+
other = mismatched_freq
|
| 1227 |
+
rng = period_range("2014", "2024", freq="Y")
|
| 1228 |
+
msg = "Input has different freq(=.+)? from Period.*?\\(freq=Y-DEC\\)"
|
| 1229 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1230 |
+
rng + other
|
| 1231 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1232 |
+
rng += other
|
| 1233 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1234 |
+
rng - other
|
| 1235 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1236 |
+
rng -= other
|
| 1237 |
+
|
| 1238 |
+
def test_pi_add_iadd_timedeltalike_M(self):
|
| 1239 |
+
rng = period_range("2014-01", "2016-12", freq="M")
|
| 1240 |
+
expected = period_range("2014-06", "2017-05", freq="M")
|
| 1241 |
+
|
| 1242 |
+
result = rng + pd.offsets.MonthEnd(5)
|
| 1243 |
+
tm.assert_index_equal(result, expected)
|
| 1244 |
+
|
| 1245 |
+
rng += pd.offsets.MonthEnd(5)
|
| 1246 |
+
tm.assert_index_equal(rng, expected)
|
| 1247 |
+
|
| 1248 |
+
def test_pi_add_sub_timedeltalike_freq_mismatch_monthly(self, mismatched_freq):
|
| 1249 |
+
other = mismatched_freq
|
| 1250 |
+
rng = period_range("2014-01", "2016-12", freq="M")
|
| 1251 |
+
msg = "Input has different freq(=.+)? from Period.*?\\(freq=M\\)"
|
| 1252 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1253 |
+
rng + other
|
| 1254 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1255 |
+
rng += other
|
| 1256 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1257 |
+
rng - other
|
| 1258 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1259 |
+
rng -= other
|
| 1260 |
+
|
| 1261 |
+
@pytest.mark.parametrize("transpose", [True, False])
|
| 1262 |
+
def test_parr_add_sub_td64_nat(self, box_with_array, transpose):
|
| 1263 |
+
# GH#23320 special handling for timedelta64("NaT")
|
| 1264 |
+
pi = period_range("1994-04-01", periods=9, freq="19D")
|
| 1265 |
+
other = np.timedelta64("NaT")
|
| 1266 |
+
expected = PeriodIndex(["NaT"] * 9, freq="19D")
|
| 1267 |
+
|
| 1268 |
+
obj = tm.box_expected(pi, box_with_array, transpose=transpose)
|
| 1269 |
+
expected = tm.box_expected(expected, box_with_array, transpose=transpose)
|
| 1270 |
+
|
| 1271 |
+
result = obj + other
|
| 1272 |
+
tm.assert_equal(result, expected)
|
| 1273 |
+
result = other + obj
|
| 1274 |
+
tm.assert_equal(result, expected)
|
| 1275 |
+
result = obj - other
|
| 1276 |
+
tm.assert_equal(result, expected)
|
| 1277 |
+
msg = r"cannot subtract .* from .*"
|
| 1278 |
+
with pytest.raises(TypeError, match=msg):
|
| 1279 |
+
other - obj
|
| 1280 |
+
|
| 1281 |
+
@pytest.mark.parametrize(
|
| 1282 |
+
"other",
|
| 1283 |
+
[
|
| 1284 |
+
np.array(["NaT"] * 9, dtype="m8[ns]"),
|
| 1285 |
+
TimedeltaArray._from_sequence(["NaT"] * 9, dtype="m8[ns]"),
|
| 1286 |
+
],
|
| 1287 |
+
)
|
| 1288 |
+
def test_parr_add_sub_tdt64_nat_array(self, box_with_array, other):
|
| 1289 |
+
pi = period_range("1994-04-01", periods=9, freq="19D")
|
| 1290 |
+
expected = PeriodIndex(["NaT"] * 9, freq="19D")
|
| 1291 |
+
|
| 1292 |
+
obj = tm.box_expected(pi, box_with_array)
|
| 1293 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1294 |
+
|
| 1295 |
+
result = obj + other
|
| 1296 |
+
tm.assert_equal(result, expected)
|
| 1297 |
+
result = other + obj
|
| 1298 |
+
tm.assert_equal(result, expected)
|
| 1299 |
+
result = obj - other
|
| 1300 |
+
tm.assert_equal(result, expected)
|
| 1301 |
+
msg = r"cannot subtract .* from .*"
|
| 1302 |
+
with pytest.raises(TypeError, match=msg):
|
| 1303 |
+
other - obj
|
| 1304 |
+
|
| 1305 |
+
# some but not *all* NaT
|
| 1306 |
+
other = other.copy()
|
| 1307 |
+
other[0] = np.timedelta64(0, "ns")
|
| 1308 |
+
expected = PeriodIndex([pi[0]] + ["NaT"] * 8, freq="19D")
|
| 1309 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1310 |
+
|
| 1311 |
+
result = obj + other
|
| 1312 |
+
tm.assert_equal(result, expected)
|
| 1313 |
+
result = other + obj
|
| 1314 |
+
tm.assert_equal(result, expected)
|
| 1315 |
+
result = obj - other
|
| 1316 |
+
tm.assert_equal(result, expected)
|
| 1317 |
+
with pytest.raises(TypeError, match=msg):
|
| 1318 |
+
other - obj
|
| 1319 |
+
|
| 1320 |
+
# ---------------------------------------------------------------
|
| 1321 |
+
# Unsorted
|
| 1322 |
+
|
| 1323 |
+
def test_parr_add_sub_index(self):
|
| 1324 |
+
# Check that PeriodArray defers to Index on arithmetic ops
|
| 1325 |
+
pi = period_range("2000-12-31", periods=3)
|
| 1326 |
+
parr = pi.array
|
| 1327 |
+
|
| 1328 |
+
result = parr - pi
|
| 1329 |
+
expected = pi - pi
|
| 1330 |
+
tm.assert_index_equal(result, expected)
|
| 1331 |
+
|
| 1332 |
+
def test_parr_add_sub_object_array(self):
|
| 1333 |
+
pi = period_range("2000-12-31", periods=3, freq="D")
|
| 1334 |
+
parr = pi.array
|
| 1335 |
+
|
| 1336 |
+
other = np.array([Timedelta(days=1), pd.offsets.Day(2), 3])
|
| 1337 |
+
|
| 1338 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 1339 |
+
result = parr + other
|
| 1340 |
+
|
| 1341 |
+
expected = PeriodIndex(
|
| 1342 |
+
["2001-01-01", "2001-01-03", "2001-01-05"], freq="D"
|
| 1343 |
+
)._data.astype(object)
|
| 1344 |
+
tm.assert_equal(result, expected)
|
| 1345 |
+
|
| 1346 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 1347 |
+
result = parr - other
|
| 1348 |
+
|
| 1349 |
+
expected = PeriodIndex(["2000-12-30"] * 3, freq="D")._data.astype(object)
|
| 1350 |
+
tm.assert_equal(result, expected)
|
| 1351 |
+
|
| 1352 |
+
def test_period_add_timestamp_raises(self, box_with_array):
|
| 1353 |
+
# GH#17983
|
| 1354 |
+
ts = Timestamp("2017")
|
| 1355 |
+
per = Period("2017", freq="M")
|
| 1356 |
+
|
| 1357 |
+
arr = pd.Index([per], dtype="Period[M]")
|
| 1358 |
+
arr = tm.box_expected(arr, box_with_array)
|
| 1359 |
+
|
| 1360 |
+
msg = "cannot add PeriodArray and Timestamp"
|
| 1361 |
+
with pytest.raises(TypeError, match=msg):
|
| 1362 |
+
arr + ts
|
| 1363 |
+
with pytest.raises(TypeError, match=msg):
|
| 1364 |
+
ts + arr
|
| 1365 |
+
msg = "cannot add PeriodArray and DatetimeArray"
|
| 1366 |
+
with pytest.raises(TypeError, match=msg):
|
| 1367 |
+
arr + Series([ts])
|
| 1368 |
+
with pytest.raises(TypeError, match=msg):
|
| 1369 |
+
Series([ts]) + arr
|
| 1370 |
+
with pytest.raises(TypeError, match=msg):
|
| 1371 |
+
arr + pd.Index([ts])
|
| 1372 |
+
with pytest.raises(TypeError, match=msg):
|
| 1373 |
+
pd.Index([ts]) + arr
|
| 1374 |
+
|
| 1375 |
+
if box_with_array is pd.DataFrame:
|
| 1376 |
+
msg = "cannot add PeriodArray and DatetimeArray"
|
| 1377 |
+
else:
|
| 1378 |
+
msg = r"unsupported operand type\(s\) for \+: 'Period' and 'DatetimeArray"
|
| 1379 |
+
with pytest.raises(TypeError, match=msg):
|
| 1380 |
+
arr + pd.DataFrame([ts])
|
| 1381 |
+
if box_with_array is pd.DataFrame:
|
| 1382 |
+
msg = "cannot add PeriodArray and DatetimeArray"
|
| 1383 |
+
else:
|
| 1384 |
+
msg = r"unsupported operand type\(s\) for \+: 'DatetimeArray' and 'Period'"
|
| 1385 |
+
with pytest.raises(TypeError, match=msg):
|
| 1386 |
+
pd.DataFrame([ts]) + arr
|
| 1387 |
+
|
| 1388 |
+
|
| 1389 |
+
class TestPeriodSeriesArithmetic:
|
| 1390 |
+
def test_parr_add_timedeltalike_scalar(self, three_days, box_with_array):
|
| 1391 |
+
# GH#13043
|
| 1392 |
+
ser = Series(
|
| 1393 |
+
[Period("2015-01-01", freq="D"), Period("2015-01-02", freq="D")],
|
| 1394 |
+
name="xxx",
|
| 1395 |
+
)
|
| 1396 |
+
assert ser.dtype == "Period[D]"
|
| 1397 |
+
|
| 1398 |
+
expected = Series(
|
| 1399 |
+
[Period("2015-01-04", freq="D"), Period("2015-01-05", freq="D")],
|
| 1400 |
+
name="xxx",
|
| 1401 |
+
)
|
| 1402 |
+
|
| 1403 |
+
obj = tm.box_expected(ser, box_with_array)
|
| 1404 |
+
if box_with_array is pd.DataFrame:
|
| 1405 |
+
assert (obj.dtypes == "Period[D]").all()
|
| 1406 |
+
|
| 1407 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1408 |
+
|
| 1409 |
+
result = obj + three_days
|
| 1410 |
+
tm.assert_equal(result, expected)
|
| 1411 |
+
|
| 1412 |
+
result = three_days + obj
|
| 1413 |
+
tm.assert_equal(result, expected)
|
| 1414 |
+
|
| 1415 |
+
def test_ops_series_period(self):
|
| 1416 |
+
# GH#13043
|
| 1417 |
+
ser = Series(
|
| 1418 |
+
[Period("2015-01-01", freq="D"), Period("2015-01-02", freq="D")],
|
| 1419 |
+
name="xxx",
|
| 1420 |
+
)
|
| 1421 |
+
assert ser.dtype == "Period[D]"
|
| 1422 |
+
|
| 1423 |
+
per = Period("2015-01-10", freq="D")
|
| 1424 |
+
off = per.freq
|
| 1425 |
+
# dtype will be object because of original dtype
|
| 1426 |
+
expected = Series([9 * off, 8 * off], name="xxx", dtype=object)
|
| 1427 |
+
tm.assert_series_equal(per - ser, expected)
|
| 1428 |
+
tm.assert_series_equal(ser - per, -1 * expected)
|
| 1429 |
+
|
| 1430 |
+
s2 = Series(
|
| 1431 |
+
[Period("2015-01-05", freq="D"), Period("2015-01-04", freq="D")],
|
| 1432 |
+
name="xxx",
|
| 1433 |
+
)
|
| 1434 |
+
assert s2.dtype == "Period[D]"
|
| 1435 |
+
|
| 1436 |
+
expected = Series([4 * off, 2 * off], name="xxx", dtype=object)
|
| 1437 |
+
tm.assert_series_equal(s2 - ser, expected)
|
| 1438 |
+
tm.assert_series_equal(ser - s2, -1 * expected)
|
| 1439 |
+
|
| 1440 |
+
|
| 1441 |
+
class TestPeriodIndexSeriesMethods:
|
| 1442 |
+
"""Test PeriodIndex and Period Series Ops consistency"""
|
| 1443 |
+
|
| 1444 |
+
def _check(self, values, func, expected):
|
| 1445 |
+
idx = PeriodIndex(values)
|
| 1446 |
+
result = func(idx)
|
| 1447 |
+
tm.assert_equal(result, expected)
|
| 1448 |
+
|
| 1449 |
+
ser = Series(values)
|
| 1450 |
+
result = func(ser)
|
| 1451 |
+
|
| 1452 |
+
exp = Series(expected, name=values.name)
|
| 1453 |
+
tm.assert_series_equal(result, exp)
|
| 1454 |
+
|
| 1455 |
+
def test_pi_ops(self):
|
| 1456 |
+
idx = PeriodIndex(
|
| 1457 |
+
["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
|
| 1458 |
+
)
|
| 1459 |
+
|
| 1460 |
+
expected = PeriodIndex(
|
| 1461 |
+
["2011-03", "2011-04", "2011-05", "2011-06"], freq="M", name="idx"
|
| 1462 |
+
)
|
| 1463 |
+
|
| 1464 |
+
self._check(idx, lambda x: x + 2, expected)
|
| 1465 |
+
self._check(idx, lambda x: 2 + x, expected)
|
| 1466 |
+
|
| 1467 |
+
self._check(idx + 2, lambda x: x - 2, idx)
|
| 1468 |
+
|
| 1469 |
+
result = idx - Period("2011-01", freq="M")
|
| 1470 |
+
off = idx.freq
|
| 1471 |
+
exp = pd.Index([0 * off, 1 * off, 2 * off, 3 * off], name="idx")
|
| 1472 |
+
tm.assert_index_equal(result, exp)
|
| 1473 |
+
|
| 1474 |
+
result = Period("2011-01", freq="M") - idx
|
| 1475 |
+
exp = pd.Index([0 * off, -1 * off, -2 * off, -3 * off], name="idx")
|
| 1476 |
+
tm.assert_index_equal(result, exp)
|
| 1477 |
+
|
| 1478 |
+
@pytest.mark.parametrize("ng", ["str", 1.5])
|
| 1479 |
+
@pytest.mark.parametrize(
|
| 1480 |
+
"func",
|
| 1481 |
+
[
|
| 1482 |
+
lambda obj, ng: obj + ng,
|
| 1483 |
+
lambda obj, ng: ng + obj,
|
| 1484 |
+
lambda obj, ng: obj - ng,
|
| 1485 |
+
lambda obj, ng: ng - obj,
|
| 1486 |
+
lambda obj, ng: np.add(obj, ng),
|
| 1487 |
+
lambda obj, ng: np.add(ng, obj),
|
| 1488 |
+
lambda obj, ng: np.subtract(obj, ng),
|
| 1489 |
+
lambda obj, ng: np.subtract(ng, obj),
|
| 1490 |
+
],
|
| 1491 |
+
)
|
| 1492 |
+
def test_parr_ops_errors(self, ng, func, box_with_array):
|
| 1493 |
+
idx = PeriodIndex(
|
| 1494 |
+
["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
|
| 1495 |
+
)
|
| 1496 |
+
obj = tm.box_expected(idx, box_with_array)
|
| 1497 |
+
msg = "|".join(
|
| 1498 |
+
[
|
| 1499 |
+
r"unsupported operand type\(s\)",
|
| 1500 |
+
"can only concatenate",
|
| 1501 |
+
r"must be str",
|
| 1502 |
+
"object to str implicitly",
|
| 1503 |
+
]
|
| 1504 |
+
)
|
| 1505 |
+
|
| 1506 |
+
with pytest.raises(TypeError, match=msg):
|
| 1507 |
+
func(obj, ng)
|
| 1508 |
+
|
| 1509 |
+
def test_pi_ops_nat(self):
|
| 1510 |
+
idx = PeriodIndex(
|
| 1511 |
+
["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
|
| 1512 |
+
)
|
| 1513 |
+
expected = PeriodIndex(
|
| 1514 |
+
["2011-03", "2011-04", "NaT", "2011-06"], freq="M", name="idx"
|
| 1515 |
+
)
|
| 1516 |
+
|
| 1517 |
+
self._check(idx, lambda x: x + 2, expected)
|
| 1518 |
+
self._check(idx, lambda x: 2 + x, expected)
|
| 1519 |
+
self._check(idx, lambda x: np.add(x, 2), expected)
|
| 1520 |
+
|
| 1521 |
+
self._check(idx + 2, lambda x: x - 2, idx)
|
| 1522 |
+
self._check(idx + 2, lambda x: np.subtract(x, 2), idx)
|
| 1523 |
+
|
| 1524 |
+
# freq with mult
|
| 1525 |
+
idx = PeriodIndex(
|
| 1526 |
+
["2011-01", "2011-02", "NaT", "2011-04"], freq="2M", name="idx"
|
| 1527 |
+
)
|
| 1528 |
+
expected = PeriodIndex(
|
| 1529 |
+
["2011-07", "2011-08", "NaT", "2011-10"], freq="2M", name="idx"
|
| 1530 |
+
)
|
| 1531 |
+
|
| 1532 |
+
self._check(idx, lambda x: x + 3, expected)
|
| 1533 |
+
self._check(idx, lambda x: 3 + x, expected)
|
| 1534 |
+
self._check(idx, lambda x: np.add(x, 3), expected)
|
| 1535 |
+
|
| 1536 |
+
self._check(idx + 3, lambda x: x - 3, idx)
|
| 1537 |
+
self._check(idx + 3, lambda x: np.subtract(x, 3), idx)
|
| 1538 |
+
|
| 1539 |
+
def test_pi_ops_array_int(self):
|
| 1540 |
+
idx = PeriodIndex(
|
| 1541 |
+
["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
|
| 1542 |
+
)
|
| 1543 |
+
f = lambda x: x + np.array([1, 2, 3, 4])
|
| 1544 |
+
exp = PeriodIndex(
|
| 1545 |
+
["2011-02", "2011-04", "NaT", "2011-08"], freq="M", name="idx"
|
| 1546 |
+
)
|
| 1547 |
+
self._check(idx, f, exp)
|
| 1548 |
+
|
| 1549 |
+
f = lambda x: np.add(x, np.array([4, -1, 1, 2]))
|
| 1550 |
+
exp = PeriodIndex(
|
| 1551 |
+
["2011-05", "2011-01", "NaT", "2011-06"], freq="M", name="idx"
|
| 1552 |
+
)
|
| 1553 |
+
self._check(idx, f, exp)
|
| 1554 |
+
|
| 1555 |
+
f = lambda x: x - np.array([1, 2, 3, 4])
|
| 1556 |
+
exp = PeriodIndex(
|
| 1557 |
+
["2010-12", "2010-12", "NaT", "2010-12"], freq="M", name="idx"
|
| 1558 |
+
)
|
| 1559 |
+
self._check(idx, f, exp)
|
| 1560 |
+
|
| 1561 |
+
f = lambda x: np.subtract(x, np.array([3, 2, 3, -2]))
|
| 1562 |
+
exp = PeriodIndex(
|
| 1563 |
+
["2010-10", "2010-12", "NaT", "2011-06"], freq="M", name="idx"
|
| 1564 |
+
)
|
| 1565 |
+
self._check(idx, f, exp)
|
| 1566 |
+
|
| 1567 |
+
def test_pi_ops_offset(self):
|
| 1568 |
+
idx = PeriodIndex(
|
| 1569 |
+
["2011-01-01", "2011-02-01", "2011-03-01", "2011-04-01"],
|
| 1570 |
+
freq="D",
|
| 1571 |
+
name="idx",
|
| 1572 |
+
)
|
| 1573 |
+
f = lambda x: x + pd.offsets.Day()
|
| 1574 |
+
exp = PeriodIndex(
|
| 1575 |
+
["2011-01-02", "2011-02-02", "2011-03-02", "2011-04-02"],
|
| 1576 |
+
freq="D",
|
| 1577 |
+
name="idx",
|
| 1578 |
+
)
|
| 1579 |
+
self._check(idx, f, exp)
|
| 1580 |
+
|
| 1581 |
+
f = lambda x: x + pd.offsets.Day(2)
|
| 1582 |
+
exp = PeriodIndex(
|
| 1583 |
+
["2011-01-03", "2011-02-03", "2011-03-03", "2011-04-03"],
|
| 1584 |
+
freq="D",
|
| 1585 |
+
name="idx",
|
| 1586 |
+
)
|
| 1587 |
+
self._check(idx, f, exp)
|
| 1588 |
+
|
| 1589 |
+
f = lambda x: x - pd.offsets.Day(2)
|
| 1590 |
+
exp = PeriodIndex(
|
| 1591 |
+
["2010-12-30", "2011-01-30", "2011-02-27", "2011-03-30"],
|
| 1592 |
+
freq="D",
|
| 1593 |
+
name="idx",
|
| 1594 |
+
)
|
| 1595 |
+
self._check(idx, f, exp)
|
| 1596 |
+
|
| 1597 |
+
def test_pi_offset_errors(self):
|
| 1598 |
+
idx = PeriodIndex(
|
| 1599 |
+
["2011-01-01", "2011-02-01", "2011-03-01", "2011-04-01"],
|
| 1600 |
+
freq="D",
|
| 1601 |
+
name="idx",
|
| 1602 |
+
)
|
| 1603 |
+
ser = Series(idx)
|
| 1604 |
+
|
| 1605 |
+
msg = (
|
| 1606 |
+
"Cannot add/subtract timedelta-like from PeriodArray that is not "
|
| 1607 |
+
"an integer multiple of the PeriodArray's freq"
|
| 1608 |
+
)
|
| 1609 |
+
for obj in [idx, ser]:
|
| 1610 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1611 |
+
obj + pd.offsets.Hour(2)
|
| 1612 |
+
|
| 1613 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1614 |
+
pd.offsets.Hour(2) + obj
|
| 1615 |
+
|
| 1616 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
| 1617 |
+
obj - pd.offsets.Hour(2)
|
| 1618 |
+
|
| 1619 |
+
def test_pi_sub_period(self):
|
| 1620 |
+
# GH#13071
|
| 1621 |
+
idx = PeriodIndex(
|
| 1622 |
+
["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
|
| 1623 |
+
)
|
| 1624 |
+
|
| 1625 |
+
result = idx - Period("2012-01", freq="M")
|
| 1626 |
+
off = idx.freq
|
| 1627 |
+
exp = pd.Index([-12 * off, -11 * off, -10 * off, -9 * off], name="idx")
|
| 1628 |
+
tm.assert_index_equal(result, exp)
|
| 1629 |
+
|
| 1630 |
+
result = np.subtract(idx, Period("2012-01", freq="M"))
|
| 1631 |
+
tm.assert_index_equal(result, exp)
|
| 1632 |
+
|
| 1633 |
+
result = Period("2012-01", freq="M") - idx
|
| 1634 |
+
exp = pd.Index([12 * off, 11 * off, 10 * off, 9 * off], name="idx")
|
| 1635 |
+
tm.assert_index_equal(result, exp)
|
| 1636 |
+
|
| 1637 |
+
result = np.subtract(Period("2012-01", freq="M"), idx)
|
| 1638 |
+
tm.assert_index_equal(result, exp)
|
| 1639 |
+
|
| 1640 |
+
exp = TimedeltaIndex([np.nan, np.nan, np.nan, np.nan], name="idx")
|
| 1641 |
+
result = idx - Period("NaT", freq="M")
|
| 1642 |
+
tm.assert_index_equal(result, exp)
|
| 1643 |
+
assert result.freq == exp.freq
|
| 1644 |
+
|
| 1645 |
+
result = Period("NaT", freq="M") - idx
|
| 1646 |
+
tm.assert_index_equal(result, exp)
|
| 1647 |
+
assert result.freq == exp.freq
|
| 1648 |
+
|
| 1649 |
+
def test_pi_sub_pdnat(self):
|
| 1650 |
+
# GH#13071, GH#19389
|
| 1651 |
+
idx = PeriodIndex(
|
| 1652 |
+
["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
|
| 1653 |
+
)
|
| 1654 |
+
exp = TimedeltaIndex([pd.NaT] * 4, name="idx")
|
| 1655 |
+
tm.assert_index_equal(pd.NaT - idx, exp)
|
| 1656 |
+
tm.assert_index_equal(idx - pd.NaT, exp)
|
| 1657 |
+
|
| 1658 |
+
def test_pi_sub_period_nat(self):
|
| 1659 |
+
# GH#13071
|
| 1660 |
+
idx = PeriodIndex(
|
| 1661 |
+
["2011-01", "NaT", "2011-03", "2011-04"], freq="M", name="idx"
|
| 1662 |
+
)
|
| 1663 |
+
|
| 1664 |
+
result = idx - Period("2012-01", freq="M")
|
| 1665 |
+
off = idx.freq
|
| 1666 |
+
exp = pd.Index([-12 * off, pd.NaT, -10 * off, -9 * off], name="idx")
|
| 1667 |
+
tm.assert_index_equal(result, exp)
|
| 1668 |
+
|
| 1669 |
+
result = Period("2012-01", freq="M") - idx
|
| 1670 |
+
exp = pd.Index([12 * off, pd.NaT, 10 * off, 9 * off], name="idx")
|
| 1671 |
+
tm.assert_index_equal(result, exp)
|
| 1672 |
+
|
| 1673 |
+
exp = TimedeltaIndex([np.nan, np.nan, np.nan, np.nan], name="idx")
|
| 1674 |
+
tm.assert_index_equal(idx - Period("NaT", freq="M"), exp)
|
| 1675 |
+
tm.assert_index_equal(Period("NaT", freq="M") - idx, exp)
|
omnilmm/lib/python3.10/site-packages/pandas/tests/arithmetic/test_timedelta64.py
ADDED
|
@@ -0,0 +1,2179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Arithmetic tests for DataFrame/Series/Index/Array classes that should
|
| 2 |
+
# behave identically.
|
| 3 |
+
from datetime import (
|
| 4 |
+
datetime,
|
| 5 |
+
timedelta,
|
| 6 |
+
)
|
| 7 |
+
|
| 8 |
+
import numpy as np
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from pandas.errors import (
|
| 12 |
+
OutOfBoundsDatetime,
|
| 13 |
+
PerformanceWarning,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
import pandas as pd
|
| 17 |
+
from pandas import (
|
| 18 |
+
DataFrame,
|
| 19 |
+
DatetimeIndex,
|
| 20 |
+
Index,
|
| 21 |
+
NaT,
|
| 22 |
+
Series,
|
| 23 |
+
Timedelta,
|
| 24 |
+
TimedeltaIndex,
|
| 25 |
+
Timestamp,
|
| 26 |
+
offsets,
|
| 27 |
+
timedelta_range,
|
| 28 |
+
)
|
| 29 |
+
import pandas._testing as tm
|
| 30 |
+
from pandas.core.arrays import NumpyExtensionArray
|
| 31 |
+
from pandas.tests.arithmetic.common import (
|
| 32 |
+
assert_invalid_addsub_type,
|
| 33 |
+
assert_invalid_comparison,
|
| 34 |
+
get_upcast_box,
|
| 35 |
+
)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def assert_dtype(obj, expected_dtype):
|
| 39 |
+
"""
|
| 40 |
+
Helper to check the dtype for a Series, Index, or single-column DataFrame.
|
| 41 |
+
"""
|
| 42 |
+
dtype = tm.get_dtype(obj)
|
| 43 |
+
|
| 44 |
+
assert dtype == expected_dtype
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def get_expected_name(box, names):
|
| 48 |
+
if box is DataFrame:
|
| 49 |
+
# Since we are operating with a DataFrame and a non-DataFrame,
|
| 50 |
+
# the non-DataFrame is cast to Series and its name ignored.
|
| 51 |
+
exname = names[0]
|
| 52 |
+
elif box in [tm.to_array, pd.array]:
|
| 53 |
+
exname = names[1]
|
| 54 |
+
else:
|
| 55 |
+
exname = names[2]
|
| 56 |
+
return exname
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
# ------------------------------------------------------------------
|
| 60 |
+
# Timedelta64[ns] dtype Comparisons
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class TestTimedelta64ArrayLikeComparisons:
|
| 64 |
+
# Comparison tests for timedelta64[ns] vectors fully parametrized over
|
| 65 |
+
# DataFrame/Series/TimedeltaIndex/TimedeltaArray. Ideally all comparison
|
| 66 |
+
# tests will eventually end up here.
|
| 67 |
+
|
| 68 |
+
def test_compare_timedelta64_zerodim(self, box_with_array):
|
| 69 |
+
# GH#26689 should unbox when comparing with zerodim array
|
| 70 |
+
box = box_with_array
|
| 71 |
+
xbox = box_with_array if box_with_array not in [Index, pd.array] else np.ndarray
|
| 72 |
+
|
| 73 |
+
tdi = timedelta_range("2h", periods=4)
|
| 74 |
+
other = np.array(tdi.to_numpy()[0])
|
| 75 |
+
|
| 76 |
+
tdi = tm.box_expected(tdi, box)
|
| 77 |
+
res = tdi <= other
|
| 78 |
+
expected = np.array([True, False, False, False])
|
| 79 |
+
expected = tm.box_expected(expected, xbox)
|
| 80 |
+
tm.assert_equal(res, expected)
|
| 81 |
+
|
| 82 |
+
@pytest.mark.parametrize(
|
| 83 |
+
"td_scalar",
|
| 84 |
+
[
|
| 85 |
+
timedelta(days=1),
|
| 86 |
+
Timedelta(days=1),
|
| 87 |
+
Timedelta(days=1).to_timedelta64(),
|
| 88 |
+
offsets.Hour(24),
|
| 89 |
+
],
|
| 90 |
+
)
|
| 91 |
+
def test_compare_timedeltalike_scalar(self, box_with_array, td_scalar):
|
| 92 |
+
# regression test for GH#5963
|
| 93 |
+
box = box_with_array
|
| 94 |
+
xbox = box if box not in [Index, pd.array] else np.ndarray
|
| 95 |
+
|
| 96 |
+
ser = Series([timedelta(days=1), timedelta(days=2)])
|
| 97 |
+
ser = tm.box_expected(ser, box)
|
| 98 |
+
actual = ser > td_scalar
|
| 99 |
+
expected = Series([False, True])
|
| 100 |
+
expected = tm.box_expected(expected, xbox)
|
| 101 |
+
tm.assert_equal(actual, expected)
|
| 102 |
+
|
| 103 |
+
@pytest.mark.parametrize(
|
| 104 |
+
"invalid",
|
| 105 |
+
[
|
| 106 |
+
345600000000000,
|
| 107 |
+
"a",
|
| 108 |
+
Timestamp("2021-01-01"),
|
| 109 |
+
Timestamp("2021-01-01").now("UTC"),
|
| 110 |
+
Timestamp("2021-01-01").now().to_datetime64(),
|
| 111 |
+
Timestamp("2021-01-01").now().to_pydatetime(),
|
| 112 |
+
Timestamp("2021-01-01").date(),
|
| 113 |
+
np.array(4), # zero-dim mismatched dtype
|
| 114 |
+
],
|
| 115 |
+
)
|
| 116 |
+
def test_td64_comparisons_invalid(self, box_with_array, invalid):
|
| 117 |
+
# GH#13624 for str
|
| 118 |
+
box = box_with_array
|
| 119 |
+
|
| 120 |
+
rng = timedelta_range("1 days", periods=10)
|
| 121 |
+
obj = tm.box_expected(rng, box)
|
| 122 |
+
|
| 123 |
+
assert_invalid_comparison(obj, invalid, box)
|
| 124 |
+
|
| 125 |
+
@pytest.mark.parametrize(
|
| 126 |
+
"other",
|
| 127 |
+
[
|
| 128 |
+
list(range(10)),
|
| 129 |
+
np.arange(10),
|
| 130 |
+
np.arange(10).astype(np.float32),
|
| 131 |
+
np.arange(10).astype(object),
|
| 132 |
+
pd.date_range("1970-01-01", periods=10, tz="UTC").array,
|
| 133 |
+
np.array(pd.date_range("1970-01-01", periods=10)),
|
| 134 |
+
list(pd.date_range("1970-01-01", periods=10)),
|
| 135 |
+
pd.date_range("1970-01-01", periods=10).astype(object),
|
| 136 |
+
pd.period_range("1971-01-01", freq="D", periods=10).array,
|
| 137 |
+
pd.period_range("1971-01-01", freq="D", periods=10).astype(object),
|
| 138 |
+
],
|
| 139 |
+
)
|
| 140 |
+
def test_td64arr_cmp_arraylike_invalid(self, other, box_with_array):
|
| 141 |
+
# We don't parametrize this over box_with_array because listlike
|
| 142 |
+
# other plays poorly with assert_invalid_comparison reversed checks
|
| 143 |
+
|
| 144 |
+
rng = timedelta_range("1 days", periods=10)._data
|
| 145 |
+
rng = tm.box_expected(rng, box_with_array)
|
| 146 |
+
assert_invalid_comparison(rng, other, box_with_array)
|
| 147 |
+
|
| 148 |
+
def test_td64arr_cmp_mixed_invalid(self):
|
| 149 |
+
rng = timedelta_range("1 days", periods=5)._data
|
| 150 |
+
other = np.array([0, 1, 2, rng[3], Timestamp("2021-01-01")])
|
| 151 |
+
|
| 152 |
+
result = rng == other
|
| 153 |
+
expected = np.array([False, False, False, True, False])
|
| 154 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 155 |
+
|
| 156 |
+
result = rng != other
|
| 157 |
+
tm.assert_numpy_array_equal(result, ~expected)
|
| 158 |
+
|
| 159 |
+
msg = "Invalid comparison between|Cannot compare type|not supported between"
|
| 160 |
+
with pytest.raises(TypeError, match=msg):
|
| 161 |
+
rng < other
|
| 162 |
+
with pytest.raises(TypeError, match=msg):
|
| 163 |
+
rng > other
|
| 164 |
+
with pytest.raises(TypeError, match=msg):
|
| 165 |
+
rng <= other
|
| 166 |
+
with pytest.raises(TypeError, match=msg):
|
| 167 |
+
rng >= other
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class TestTimedelta64ArrayComparisons:
|
| 171 |
+
# TODO: All of these need to be parametrized over box
|
| 172 |
+
|
| 173 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
| 174 |
+
def test_comp_nat(self, dtype):
|
| 175 |
+
left = TimedeltaIndex([Timedelta("1 days"), NaT, Timedelta("3 days")])
|
| 176 |
+
right = TimedeltaIndex([NaT, NaT, Timedelta("3 days")])
|
| 177 |
+
|
| 178 |
+
lhs, rhs = left, right
|
| 179 |
+
if dtype is object:
|
| 180 |
+
lhs, rhs = left.astype(object), right.astype(object)
|
| 181 |
+
|
| 182 |
+
result = rhs == lhs
|
| 183 |
+
expected = np.array([False, False, True])
|
| 184 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 185 |
+
|
| 186 |
+
result = rhs != lhs
|
| 187 |
+
expected = np.array([True, True, False])
|
| 188 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 189 |
+
|
| 190 |
+
expected = np.array([False, False, False])
|
| 191 |
+
tm.assert_numpy_array_equal(lhs == NaT, expected)
|
| 192 |
+
tm.assert_numpy_array_equal(NaT == rhs, expected)
|
| 193 |
+
|
| 194 |
+
expected = np.array([True, True, True])
|
| 195 |
+
tm.assert_numpy_array_equal(lhs != NaT, expected)
|
| 196 |
+
tm.assert_numpy_array_equal(NaT != lhs, expected)
|
| 197 |
+
|
| 198 |
+
expected = np.array([False, False, False])
|
| 199 |
+
tm.assert_numpy_array_equal(lhs < NaT, expected)
|
| 200 |
+
tm.assert_numpy_array_equal(NaT > lhs, expected)
|
| 201 |
+
|
| 202 |
+
@pytest.mark.parametrize(
|
| 203 |
+
"idx2",
|
| 204 |
+
[
|
| 205 |
+
TimedeltaIndex(
|
| 206 |
+
["2 day", "2 day", NaT, NaT, "1 day 00:00:02", "5 days 00:00:03"]
|
| 207 |
+
),
|
| 208 |
+
np.array(
|
| 209 |
+
[
|
| 210 |
+
np.timedelta64(2, "D"),
|
| 211 |
+
np.timedelta64(2, "D"),
|
| 212 |
+
np.timedelta64("nat"),
|
| 213 |
+
np.timedelta64("nat"),
|
| 214 |
+
np.timedelta64(1, "D") + np.timedelta64(2, "s"),
|
| 215 |
+
np.timedelta64(5, "D") + np.timedelta64(3, "s"),
|
| 216 |
+
]
|
| 217 |
+
),
|
| 218 |
+
],
|
| 219 |
+
)
|
| 220 |
+
def test_comparisons_nat(self, idx2):
|
| 221 |
+
idx1 = TimedeltaIndex(
|
| 222 |
+
[
|
| 223 |
+
"1 day",
|
| 224 |
+
NaT,
|
| 225 |
+
"1 day 00:00:01",
|
| 226 |
+
NaT,
|
| 227 |
+
"1 day 00:00:01",
|
| 228 |
+
"5 day 00:00:03",
|
| 229 |
+
]
|
| 230 |
+
)
|
| 231 |
+
# Check pd.NaT is handles as the same as np.nan
|
| 232 |
+
result = idx1 < idx2
|
| 233 |
+
expected = np.array([True, False, False, False, True, False])
|
| 234 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 235 |
+
|
| 236 |
+
result = idx2 > idx1
|
| 237 |
+
expected = np.array([True, False, False, False, True, False])
|
| 238 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 239 |
+
|
| 240 |
+
result = idx1 <= idx2
|
| 241 |
+
expected = np.array([True, False, False, False, True, True])
|
| 242 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 243 |
+
|
| 244 |
+
result = idx2 >= idx1
|
| 245 |
+
expected = np.array([True, False, False, False, True, True])
|
| 246 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 247 |
+
|
| 248 |
+
result = idx1 == idx2
|
| 249 |
+
expected = np.array([False, False, False, False, False, True])
|
| 250 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 251 |
+
|
| 252 |
+
result = idx1 != idx2
|
| 253 |
+
expected = np.array([True, True, True, True, True, False])
|
| 254 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 255 |
+
|
| 256 |
+
# TODO: better name
|
| 257 |
+
def test_comparisons_coverage(self):
|
| 258 |
+
rng = timedelta_range("1 days", periods=10)
|
| 259 |
+
|
| 260 |
+
result = rng < rng[3]
|
| 261 |
+
expected = np.array([True, True, True] + [False] * 7)
|
| 262 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 263 |
+
|
| 264 |
+
result = rng == list(rng)
|
| 265 |
+
exp = rng == rng
|
| 266 |
+
tm.assert_numpy_array_equal(result, exp)
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
# ------------------------------------------------------------------
|
| 270 |
+
# Timedelta64[ns] dtype Arithmetic Operations
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
class TestTimedelta64ArithmeticUnsorted:
|
| 274 |
+
# Tests moved from type-specific test files but not
|
| 275 |
+
# yet sorted/parametrized/de-duplicated
|
| 276 |
+
|
| 277 |
+
def test_ufunc_coercions(self):
|
| 278 |
+
# normal ops are also tested in tseries/test_timedeltas.py
|
| 279 |
+
idx = TimedeltaIndex(["2h", "4h", "6h", "8h", "10h"], freq="2h", name="x")
|
| 280 |
+
|
| 281 |
+
for result in [idx * 2, np.multiply(idx, 2)]:
|
| 282 |
+
assert isinstance(result, TimedeltaIndex)
|
| 283 |
+
exp = TimedeltaIndex(["4h", "8h", "12h", "16h", "20h"], freq="4h", name="x")
|
| 284 |
+
tm.assert_index_equal(result, exp)
|
| 285 |
+
assert result.freq == "4h"
|
| 286 |
+
|
| 287 |
+
for result in [idx / 2, np.divide(idx, 2)]:
|
| 288 |
+
assert isinstance(result, TimedeltaIndex)
|
| 289 |
+
exp = TimedeltaIndex(["1h", "2h", "3h", "4h", "5h"], freq="h", name="x")
|
| 290 |
+
tm.assert_index_equal(result, exp)
|
| 291 |
+
assert result.freq == "h"
|
| 292 |
+
|
| 293 |
+
for result in [-idx, np.negative(idx)]:
|
| 294 |
+
assert isinstance(result, TimedeltaIndex)
|
| 295 |
+
exp = TimedeltaIndex(
|
| 296 |
+
["-2h", "-4h", "-6h", "-8h", "-10h"], freq="-2h", name="x"
|
| 297 |
+
)
|
| 298 |
+
tm.assert_index_equal(result, exp)
|
| 299 |
+
assert result.freq == "-2h"
|
| 300 |
+
|
| 301 |
+
idx = TimedeltaIndex(["-2h", "-1h", "0h", "1h", "2h"], freq="h", name="x")
|
| 302 |
+
for result in [abs(idx), np.absolute(idx)]:
|
| 303 |
+
assert isinstance(result, TimedeltaIndex)
|
| 304 |
+
exp = TimedeltaIndex(["2h", "1h", "0h", "1h", "2h"], freq=None, name="x")
|
| 305 |
+
tm.assert_index_equal(result, exp)
|
| 306 |
+
assert result.freq is None
|
| 307 |
+
|
| 308 |
+
def test_subtraction_ops(self):
|
| 309 |
+
# with datetimes/timedelta and tdi/dti
|
| 310 |
+
tdi = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
|
| 311 |
+
dti = pd.date_range("20130101", periods=3, name="bar")
|
| 312 |
+
td = Timedelta("1 days")
|
| 313 |
+
dt = Timestamp("20130101")
|
| 314 |
+
|
| 315 |
+
msg = "cannot subtract a datelike from a TimedeltaArray"
|
| 316 |
+
with pytest.raises(TypeError, match=msg):
|
| 317 |
+
tdi - dt
|
| 318 |
+
with pytest.raises(TypeError, match=msg):
|
| 319 |
+
tdi - dti
|
| 320 |
+
|
| 321 |
+
msg = r"unsupported operand type\(s\) for -"
|
| 322 |
+
with pytest.raises(TypeError, match=msg):
|
| 323 |
+
td - dt
|
| 324 |
+
|
| 325 |
+
msg = "(bad|unsupported) operand type for unary"
|
| 326 |
+
with pytest.raises(TypeError, match=msg):
|
| 327 |
+
td - dti
|
| 328 |
+
|
| 329 |
+
result = dt - dti
|
| 330 |
+
expected = TimedeltaIndex(["0 days", "-1 days", "-2 days"], name="bar")
|
| 331 |
+
tm.assert_index_equal(result, expected)
|
| 332 |
+
|
| 333 |
+
result = dti - dt
|
| 334 |
+
expected = TimedeltaIndex(["0 days", "1 days", "2 days"], name="bar")
|
| 335 |
+
tm.assert_index_equal(result, expected)
|
| 336 |
+
|
| 337 |
+
result = tdi - td
|
| 338 |
+
expected = TimedeltaIndex(["0 days", NaT, "1 days"], name="foo")
|
| 339 |
+
tm.assert_index_equal(result, expected)
|
| 340 |
+
|
| 341 |
+
result = td - tdi
|
| 342 |
+
expected = TimedeltaIndex(["0 days", NaT, "-1 days"], name="foo")
|
| 343 |
+
tm.assert_index_equal(result, expected)
|
| 344 |
+
|
| 345 |
+
result = dti - td
|
| 346 |
+
expected = DatetimeIndex(
|
| 347 |
+
["20121231", "20130101", "20130102"], dtype="M8[ns]", freq="D", name="bar"
|
| 348 |
+
)
|
| 349 |
+
tm.assert_index_equal(result, expected)
|
| 350 |
+
|
| 351 |
+
result = dt - tdi
|
| 352 |
+
expected = DatetimeIndex(
|
| 353 |
+
["20121231", NaT, "20121230"], dtype="M8[ns]", name="foo"
|
| 354 |
+
)
|
| 355 |
+
tm.assert_index_equal(result, expected)
|
| 356 |
+
|
| 357 |
+
def test_subtraction_ops_with_tz(self, box_with_array):
|
| 358 |
+
# check that dt/dti subtraction ops with tz are validated
|
| 359 |
+
dti = pd.date_range("20130101", periods=3)
|
| 360 |
+
dti = tm.box_expected(dti, box_with_array)
|
| 361 |
+
ts = Timestamp("20130101")
|
| 362 |
+
dt = ts.to_pydatetime()
|
| 363 |
+
dti_tz = pd.date_range("20130101", periods=3).tz_localize("US/Eastern")
|
| 364 |
+
dti_tz = tm.box_expected(dti_tz, box_with_array)
|
| 365 |
+
ts_tz = Timestamp("20130101").tz_localize("US/Eastern")
|
| 366 |
+
ts_tz2 = Timestamp("20130101").tz_localize("CET")
|
| 367 |
+
dt_tz = ts_tz.to_pydatetime()
|
| 368 |
+
td = Timedelta("1 days")
|
| 369 |
+
|
| 370 |
+
def _check(result, expected):
|
| 371 |
+
assert result == expected
|
| 372 |
+
assert isinstance(result, Timedelta)
|
| 373 |
+
|
| 374 |
+
# scalars
|
| 375 |
+
result = ts - ts
|
| 376 |
+
expected = Timedelta("0 days")
|
| 377 |
+
_check(result, expected)
|
| 378 |
+
|
| 379 |
+
result = dt_tz - ts_tz
|
| 380 |
+
expected = Timedelta("0 days")
|
| 381 |
+
_check(result, expected)
|
| 382 |
+
|
| 383 |
+
result = ts_tz - dt_tz
|
| 384 |
+
expected = Timedelta("0 days")
|
| 385 |
+
_check(result, expected)
|
| 386 |
+
|
| 387 |
+
# tz mismatches
|
| 388 |
+
msg = "Cannot subtract tz-naive and tz-aware datetime-like objects."
|
| 389 |
+
with pytest.raises(TypeError, match=msg):
|
| 390 |
+
dt_tz - ts
|
| 391 |
+
msg = "can't subtract offset-naive and offset-aware datetimes"
|
| 392 |
+
with pytest.raises(TypeError, match=msg):
|
| 393 |
+
dt_tz - dt
|
| 394 |
+
msg = "can't subtract offset-naive and offset-aware datetimes"
|
| 395 |
+
with pytest.raises(TypeError, match=msg):
|
| 396 |
+
dt - dt_tz
|
| 397 |
+
msg = "Cannot subtract tz-naive and tz-aware datetime-like objects."
|
| 398 |
+
with pytest.raises(TypeError, match=msg):
|
| 399 |
+
ts - dt_tz
|
| 400 |
+
with pytest.raises(TypeError, match=msg):
|
| 401 |
+
ts_tz2 - ts
|
| 402 |
+
with pytest.raises(TypeError, match=msg):
|
| 403 |
+
ts_tz2 - dt
|
| 404 |
+
|
| 405 |
+
msg = "Cannot subtract tz-naive and tz-aware"
|
| 406 |
+
# with dti
|
| 407 |
+
with pytest.raises(TypeError, match=msg):
|
| 408 |
+
dti - ts_tz
|
| 409 |
+
with pytest.raises(TypeError, match=msg):
|
| 410 |
+
dti_tz - ts
|
| 411 |
+
|
| 412 |
+
result = dti_tz - dt_tz
|
| 413 |
+
expected = TimedeltaIndex(["0 days", "1 days", "2 days"])
|
| 414 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 415 |
+
tm.assert_equal(result, expected)
|
| 416 |
+
|
| 417 |
+
result = dt_tz - dti_tz
|
| 418 |
+
expected = TimedeltaIndex(["0 days", "-1 days", "-2 days"])
|
| 419 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 420 |
+
tm.assert_equal(result, expected)
|
| 421 |
+
|
| 422 |
+
result = dti_tz - ts_tz
|
| 423 |
+
expected = TimedeltaIndex(["0 days", "1 days", "2 days"])
|
| 424 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 425 |
+
tm.assert_equal(result, expected)
|
| 426 |
+
|
| 427 |
+
result = ts_tz - dti_tz
|
| 428 |
+
expected = TimedeltaIndex(["0 days", "-1 days", "-2 days"])
|
| 429 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 430 |
+
tm.assert_equal(result, expected)
|
| 431 |
+
|
| 432 |
+
result = td - td
|
| 433 |
+
expected = Timedelta("0 days")
|
| 434 |
+
_check(result, expected)
|
| 435 |
+
|
| 436 |
+
result = dti_tz - td
|
| 437 |
+
expected = DatetimeIndex(
|
| 438 |
+
["20121231", "20130101", "20130102"], tz="US/Eastern"
|
| 439 |
+
).as_unit("ns")
|
| 440 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 441 |
+
tm.assert_equal(result, expected)
|
| 442 |
+
|
| 443 |
+
def test_dti_tdi_numeric_ops(self):
|
| 444 |
+
# These are normally union/diff set-like ops
|
| 445 |
+
tdi = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
|
| 446 |
+
dti = pd.date_range("20130101", periods=3, name="bar")
|
| 447 |
+
|
| 448 |
+
result = tdi - tdi
|
| 449 |
+
expected = TimedeltaIndex(["0 days", NaT, "0 days"], name="foo")
|
| 450 |
+
tm.assert_index_equal(result, expected)
|
| 451 |
+
|
| 452 |
+
result = tdi + tdi
|
| 453 |
+
expected = TimedeltaIndex(["2 days", NaT, "4 days"], name="foo")
|
| 454 |
+
tm.assert_index_equal(result, expected)
|
| 455 |
+
|
| 456 |
+
result = dti - tdi # name will be reset
|
| 457 |
+
expected = DatetimeIndex(["20121231", NaT, "20130101"], dtype="M8[ns]")
|
| 458 |
+
tm.assert_index_equal(result, expected)
|
| 459 |
+
|
| 460 |
+
def test_addition_ops(self):
|
| 461 |
+
# with datetimes/timedelta and tdi/dti
|
| 462 |
+
tdi = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
|
| 463 |
+
dti = pd.date_range("20130101", periods=3, name="bar")
|
| 464 |
+
td = Timedelta("1 days")
|
| 465 |
+
dt = Timestamp("20130101")
|
| 466 |
+
|
| 467 |
+
result = tdi + dt
|
| 468 |
+
expected = DatetimeIndex(
|
| 469 |
+
["20130102", NaT, "20130103"], dtype="M8[ns]", name="foo"
|
| 470 |
+
)
|
| 471 |
+
tm.assert_index_equal(result, expected)
|
| 472 |
+
|
| 473 |
+
result = dt + tdi
|
| 474 |
+
expected = DatetimeIndex(
|
| 475 |
+
["20130102", NaT, "20130103"], dtype="M8[ns]", name="foo"
|
| 476 |
+
)
|
| 477 |
+
tm.assert_index_equal(result, expected)
|
| 478 |
+
|
| 479 |
+
result = td + tdi
|
| 480 |
+
expected = TimedeltaIndex(["2 days", NaT, "3 days"], name="foo")
|
| 481 |
+
tm.assert_index_equal(result, expected)
|
| 482 |
+
|
| 483 |
+
result = tdi + td
|
| 484 |
+
expected = TimedeltaIndex(["2 days", NaT, "3 days"], name="foo")
|
| 485 |
+
tm.assert_index_equal(result, expected)
|
| 486 |
+
|
| 487 |
+
# unequal length
|
| 488 |
+
msg = "cannot add indices of unequal length"
|
| 489 |
+
with pytest.raises(ValueError, match=msg):
|
| 490 |
+
tdi + dti[0:1]
|
| 491 |
+
with pytest.raises(ValueError, match=msg):
|
| 492 |
+
tdi[0:1] + dti
|
| 493 |
+
|
| 494 |
+
# random indexes
|
| 495 |
+
msg = "Addition/subtraction of integers and integer-arrays"
|
| 496 |
+
with pytest.raises(TypeError, match=msg):
|
| 497 |
+
tdi + Index([1, 2, 3], dtype=np.int64)
|
| 498 |
+
|
| 499 |
+
# this is a union!
|
| 500 |
+
# FIXME: don't leave commented-out
|
| 501 |
+
# pytest.raises(TypeError, lambda : Index([1,2,3]) + tdi)
|
| 502 |
+
|
| 503 |
+
result = tdi + dti # name will be reset
|
| 504 |
+
expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[ns]")
|
| 505 |
+
tm.assert_index_equal(result, expected)
|
| 506 |
+
|
| 507 |
+
result = dti + tdi # name will be reset
|
| 508 |
+
expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[ns]")
|
| 509 |
+
tm.assert_index_equal(result, expected)
|
| 510 |
+
|
| 511 |
+
result = dt + td
|
| 512 |
+
expected = Timestamp("20130102")
|
| 513 |
+
assert result == expected
|
| 514 |
+
|
| 515 |
+
result = td + dt
|
| 516 |
+
expected = Timestamp("20130102")
|
| 517 |
+
assert result == expected
|
| 518 |
+
|
| 519 |
+
# TODO: Needs more informative name, probably split up into
|
| 520 |
+
# more targeted tests
|
| 521 |
+
@pytest.mark.parametrize("freq", ["D", "B"])
|
| 522 |
+
def test_timedelta(self, freq):
|
| 523 |
+
index = pd.date_range("1/1/2000", periods=50, freq=freq)
|
| 524 |
+
|
| 525 |
+
shifted = index + timedelta(1)
|
| 526 |
+
back = shifted + timedelta(-1)
|
| 527 |
+
back = back._with_freq("infer")
|
| 528 |
+
tm.assert_index_equal(index, back)
|
| 529 |
+
|
| 530 |
+
if freq == "D":
|
| 531 |
+
expected = pd.tseries.offsets.Day(1)
|
| 532 |
+
assert index.freq == expected
|
| 533 |
+
assert shifted.freq == expected
|
| 534 |
+
assert back.freq == expected
|
| 535 |
+
else: # freq == 'B'
|
| 536 |
+
assert index.freq == pd.tseries.offsets.BusinessDay(1)
|
| 537 |
+
assert shifted.freq is None
|
| 538 |
+
assert back.freq == pd.tseries.offsets.BusinessDay(1)
|
| 539 |
+
|
| 540 |
+
result = index - timedelta(1)
|
| 541 |
+
expected = index + timedelta(-1)
|
| 542 |
+
tm.assert_index_equal(result, expected)
|
| 543 |
+
|
| 544 |
+
def test_timedelta_tick_arithmetic(self):
|
| 545 |
+
# GH#4134, buggy with timedeltas
|
| 546 |
+
rng = pd.date_range("2013", "2014")
|
| 547 |
+
s = Series(rng)
|
| 548 |
+
result1 = rng - offsets.Hour(1)
|
| 549 |
+
result2 = DatetimeIndex(s - np.timedelta64(100000000))
|
| 550 |
+
result3 = rng - np.timedelta64(100000000)
|
| 551 |
+
result4 = DatetimeIndex(s - offsets.Hour(1))
|
| 552 |
+
|
| 553 |
+
assert result1.freq == rng.freq
|
| 554 |
+
result1 = result1._with_freq(None)
|
| 555 |
+
tm.assert_index_equal(result1, result4)
|
| 556 |
+
|
| 557 |
+
assert result3.freq == rng.freq
|
| 558 |
+
result3 = result3._with_freq(None)
|
| 559 |
+
tm.assert_index_equal(result2, result3)
|
| 560 |
+
|
| 561 |
+
def test_tda_add_sub_index(self):
|
| 562 |
+
# Check that TimedeltaArray defers to Index on arithmetic ops
|
| 563 |
+
tdi = TimedeltaIndex(["1 days", NaT, "2 days"])
|
| 564 |
+
tda = tdi.array
|
| 565 |
+
|
| 566 |
+
dti = pd.date_range("1999-12-31", periods=3, freq="D")
|
| 567 |
+
|
| 568 |
+
result = tda + dti
|
| 569 |
+
expected = tdi + dti
|
| 570 |
+
tm.assert_index_equal(result, expected)
|
| 571 |
+
|
| 572 |
+
result = tda + tdi
|
| 573 |
+
expected = tdi + tdi
|
| 574 |
+
tm.assert_index_equal(result, expected)
|
| 575 |
+
|
| 576 |
+
result = tda - tdi
|
| 577 |
+
expected = tdi - tdi
|
| 578 |
+
tm.assert_index_equal(result, expected)
|
| 579 |
+
|
| 580 |
+
def test_tda_add_dt64_object_array(self, box_with_array, tz_naive_fixture):
|
| 581 |
+
# Result should be cast back to DatetimeArray
|
| 582 |
+
box = box_with_array
|
| 583 |
+
|
| 584 |
+
dti = pd.date_range("2016-01-01", periods=3, tz=tz_naive_fixture)
|
| 585 |
+
dti = dti._with_freq(None)
|
| 586 |
+
tdi = dti - dti
|
| 587 |
+
|
| 588 |
+
obj = tm.box_expected(tdi, box)
|
| 589 |
+
other = tm.box_expected(dti, box)
|
| 590 |
+
|
| 591 |
+
with tm.assert_produces_warning(PerformanceWarning):
|
| 592 |
+
result = obj + other.astype(object)
|
| 593 |
+
tm.assert_equal(result, other.astype(object))
|
| 594 |
+
|
| 595 |
+
# -------------------------------------------------------------
|
| 596 |
+
# Binary operations TimedeltaIndex and timedelta-like
|
| 597 |
+
|
| 598 |
+
def test_tdi_iadd_timedeltalike(self, two_hours, box_with_array):
|
| 599 |
+
# only test adding/sub offsets as + is now numeric
|
| 600 |
+
rng = timedelta_range("1 days", "10 days")
|
| 601 |
+
expected = timedelta_range("1 days 02:00:00", "10 days 02:00:00", freq="D")
|
| 602 |
+
|
| 603 |
+
rng = tm.box_expected(rng, box_with_array)
|
| 604 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 605 |
+
|
| 606 |
+
orig_rng = rng
|
| 607 |
+
rng += two_hours
|
| 608 |
+
tm.assert_equal(rng, expected)
|
| 609 |
+
if box_with_array is not Index:
|
| 610 |
+
# Check that operation is actually inplace
|
| 611 |
+
tm.assert_equal(orig_rng, expected)
|
| 612 |
+
|
| 613 |
+
def test_tdi_isub_timedeltalike(self, two_hours, box_with_array):
|
| 614 |
+
# only test adding/sub offsets as - is now numeric
|
| 615 |
+
rng = timedelta_range("1 days", "10 days")
|
| 616 |
+
expected = timedelta_range("0 days 22:00:00", "9 days 22:00:00")
|
| 617 |
+
|
| 618 |
+
rng = tm.box_expected(rng, box_with_array)
|
| 619 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 620 |
+
|
| 621 |
+
orig_rng = rng
|
| 622 |
+
rng -= two_hours
|
| 623 |
+
tm.assert_equal(rng, expected)
|
| 624 |
+
if box_with_array is not Index:
|
| 625 |
+
# Check that operation is actually inplace
|
| 626 |
+
tm.assert_equal(orig_rng, expected)
|
| 627 |
+
|
| 628 |
+
# -------------------------------------------------------------
|
| 629 |
+
|
| 630 |
+
def test_tdi_ops_attributes(self):
|
| 631 |
+
rng = timedelta_range("2 days", periods=5, freq="2D", name="x")
|
| 632 |
+
|
| 633 |
+
result = rng + 1 * rng.freq
|
| 634 |
+
exp = timedelta_range("4 days", periods=5, freq="2D", name="x")
|
| 635 |
+
tm.assert_index_equal(result, exp)
|
| 636 |
+
assert result.freq == "2D"
|
| 637 |
+
|
| 638 |
+
result = rng - 2 * rng.freq
|
| 639 |
+
exp = timedelta_range("-2 days", periods=5, freq="2D", name="x")
|
| 640 |
+
tm.assert_index_equal(result, exp)
|
| 641 |
+
assert result.freq == "2D"
|
| 642 |
+
|
| 643 |
+
result = rng * 2
|
| 644 |
+
exp = timedelta_range("4 days", periods=5, freq="4D", name="x")
|
| 645 |
+
tm.assert_index_equal(result, exp)
|
| 646 |
+
assert result.freq == "4D"
|
| 647 |
+
|
| 648 |
+
result = rng / 2
|
| 649 |
+
exp = timedelta_range("1 days", periods=5, freq="D", name="x")
|
| 650 |
+
tm.assert_index_equal(result, exp)
|
| 651 |
+
assert result.freq == "D"
|
| 652 |
+
|
| 653 |
+
result = -rng
|
| 654 |
+
exp = timedelta_range("-2 days", periods=5, freq="-2D", name="x")
|
| 655 |
+
tm.assert_index_equal(result, exp)
|
| 656 |
+
assert result.freq == "-2D"
|
| 657 |
+
|
| 658 |
+
rng = timedelta_range("-2 days", periods=5, freq="D", name="x")
|
| 659 |
+
|
| 660 |
+
result = abs(rng)
|
| 661 |
+
exp = TimedeltaIndex(
|
| 662 |
+
["2 days", "1 days", "0 days", "1 days", "2 days"], name="x"
|
| 663 |
+
)
|
| 664 |
+
tm.assert_index_equal(result, exp)
|
| 665 |
+
assert result.freq is None
|
| 666 |
+
|
| 667 |
+
|
| 668 |
+
class TestAddSubNaTMasking:
|
| 669 |
+
# TODO: parametrize over boxes
|
| 670 |
+
|
| 671 |
+
@pytest.mark.parametrize("str_ts", ["1950-01-01", "1980-01-01"])
|
| 672 |
+
def test_tdarr_add_timestamp_nat_masking(self, box_with_array, str_ts):
|
| 673 |
+
# GH#17991 checking for overflow-masking with NaT
|
| 674 |
+
tdinat = pd.to_timedelta(["24658 days 11:15:00", "NaT"])
|
| 675 |
+
tdobj = tm.box_expected(tdinat, box_with_array)
|
| 676 |
+
|
| 677 |
+
ts = Timestamp(str_ts)
|
| 678 |
+
ts_variants = [
|
| 679 |
+
ts,
|
| 680 |
+
ts.to_pydatetime(),
|
| 681 |
+
ts.to_datetime64().astype("datetime64[ns]"),
|
| 682 |
+
ts.to_datetime64().astype("datetime64[D]"),
|
| 683 |
+
]
|
| 684 |
+
|
| 685 |
+
for variant in ts_variants:
|
| 686 |
+
res = tdobj + variant
|
| 687 |
+
if box_with_array is DataFrame:
|
| 688 |
+
assert res.iloc[1, 1] is NaT
|
| 689 |
+
else:
|
| 690 |
+
assert res[1] is NaT
|
| 691 |
+
|
| 692 |
+
def test_tdi_add_overflow(self):
|
| 693 |
+
# See GH#14068
|
| 694 |
+
# preliminary test scalar analogue of vectorized tests below
|
| 695 |
+
# TODO: Make raised error message more informative and test
|
| 696 |
+
with pytest.raises(OutOfBoundsDatetime, match="10155196800000000000"):
|
| 697 |
+
pd.to_timedelta(106580, "D") + Timestamp("2000")
|
| 698 |
+
with pytest.raises(OutOfBoundsDatetime, match="10155196800000000000"):
|
| 699 |
+
Timestamp("2000") + pd.to_timedelta(106580, "D")
|
| 700 |
+
|
| 701 |
+
_NaT = NaT._value + 1
|
| 702 |
+
msg = "Overflow in int64 addition"
|
| 703 |
+
with pytest.raises(OverflowError, match=msg):
|
| 704 |
+
pd.to_timedelta([106580], "D") + Timestamp("2000")
|
| 705 |
+
with pytest.raises(OverflowError, match=msg):
|
| 706 |
+
Timestamp("2000") + pd.to_timedelta([106580], "D")
|
| 707 |
+
with pytest.raises(OverflowError, match=msg):
|
| 708 |
+
pd.to_timedelta([_NaT]) - Timedelta("1 days")
|
| 709 |
+
with pytest.raises(OverflowError, match=msg):
|
| 710 |
+
pd.to_timedelta(["5 days", _NaT]) - Timedelta("1 days")
|
| 711 |
+
with pytest.raises(OverflowError, match=msg):
|
| 712 |
+
(
|
| 713 |
+
pd.to_timedelta([_NaT, "5 days", "1 hours"])
|
| 714 |
+
- pd.to_timedelta(["7 seconds", _NaT, "4 hours"])
|
| 715 |
+
)
|
| 716 |
+
|
| 717 |
+
# These should not overflow!
|
| 718 |
+
exp = TimedeltaIndex([NaT])
|
| 719 |
+
result = pd.to_timedelta([NaT]) - Timedelta("1 days")
|
| 720 |
+
tm.assert_index_equal(result, exp)
|
| 721 |
+
|
| 722 |
+
exp = TimedeltaIndex(["4 days", NaT])
|
| 723 |
+
result = pd.to_timedelta(["5 days", NaT]) - Timedelta("1 days")
|
| 724 |
+
tm.assert_index_equal(result, exp)
|
| 725 |
+
|
| 726 |
+
exp = TimedeltaIndex([NaT, NaT, "5 hours"])
|
| 727 |
+
result = pd.to_timedelta([NaT, "5 days", "1 hours"]) + pd.to_timedelta(
|
| 728 |
+
["7 seconds", NaT, "4 hours"]
|
| 729 |
+
)
|
| 730 |
+
tm.assert_index_equal(result, exp)
|
| 731 |
+
|
| 732 |
+
|
| 733 |
+
class TestTimedeltaArraylikeAddSubOps:
|
| 734 |
+
# Tests for timedelta64[ns] __add__, __sub__, __radd__, __rsub__
|
| 735 |
+
|
| 736 |
+
def test_sub_nat_retain_unit(self):
|
| 737 |
+
ser = pd.to_timedelta(Series(["00:00:01"])).astype("m8[s]")
|
| 738 |
+
|
| 739 |
+
result = ser - NaT
|
| 740 |
+
expected = Series([NaT], dtype="m8[s]")
|
| 741 |
+
tm.assert_series_equal(result, expected)
|
| 742 |
+
|
| 743 |
+
    # TODO: moved from tests.indexes.timedeltas.test_arithmetic; needs
    # parametrization+de-duplication
    def test_timedelta_ops_with_missing_values(self):
        """Add/sub of td64 Series/DataFrames/scalars with NaT and np.nan.

        NaT-like operands propagate NaT; np.nan is rejected with TypeError.
        """
        # setup
        s1 = pd.to_timedelta(Series(["00:00:01"]))
        s2 = pd.to_timedelta(Series(["00:00:02"]))

        # all-NaT Series used as the expected result of NaT propagation
        sn = pd.to_timedelta(Series([NaT], dtype="m8[ns]"))

        df1 = DataFrame(["00:00:01"]).apply(pd.to_timedelta)
        df2 = DataFrame(["00:00:02"]).apply(pd.to_timedelta)

        # NaT._value is the int64 sentinel; to_timedelta turns it back into NaT
        dfn = DataFrame([NaT._value]).apply(pd.to_timedelta)

        scalar1 = pd.to_timedelta("00:00:01")
        scalar2 = pd.to_timedelta("00:00:02")
        timedelta_NaT = pd.to_timedelta("NaT")

        # scalar <-> scalar
        actual = scalar1 + scalar1
        assert actual == scalar2
        actual = scalar2 - scalar1
        assert actual == scalar1

        # Series <-> Series
        actual = s1 + s1
        tm.assert_series_equal(actual, s2)
        actual = s2 - s1
        tm.assert_series_equal(actual, s1)

        # Series <-> scalar, both operand orders
        actual = s1 + scalar1
        tm.assert_series_equal(actual, s2)
        actual = scalar1 + s1
        tm.assert_series_equal(actual, s2)
        actual = s2 - scalar1
        tm.assert_series_equal(actual, s1)
        actual = -scalar1 + s2
        tm.assert_series_equal(actual, s1)

        # timedelta NaT propagates in both orders and under negation
        actual = s1 + timedelta_NaT
        tm.assert_series_equal(actual, sn)
        actual = timedelta_NaT + s1
        tm.assert_series_equal(actual, sn)
        actual = s1 - timedelta_NaT
        tm.assert_series_equal(actual, sn)
        actual = -timedelta_NaT + s1
        tm.assert_series_equal(actual, sn)

        # np.nan is NOT a valid timedelta operand for a td64 Series
        msg = "unsupported operand type"
        with pytest.raises(TypeError, match=msg):
            s1 + np.nan
        with pytest.raises(TypeError, match=msg):
            np.nan + s1
        with pytest.raises(TypeError, match=msg):
            s1 - np.nan
        with pytest.raises(TypeError, match=msg):
            -np.nan + s1

        actual = s1 + NaT
        tm.assert_series_equal(actual, sn)
        actual = s2 - NaT
        tm.assert_series_equal(actual, sn)

        # Series <-> DataFrame broadcasts elementwise
        actual = s1 + df1
        tm.assert_frame_equal(actual, df2)
        actual = s2 - df1
        tm.assert_frame_equal(actual, df1)
        actual = df1 + s1
        tm.assert_frame_equal(actual, df2)
        actual = df2 - s1
        tm.assert_frame_equal(actual, df1)

        actual = df1 + df1
        tm.assert_frame_equal(actual, df2)
        actual = df2 - df1
        tm.assert_frame_equal(actual, df1)

        actual = df1 + scalar1
        tm.assert_frame_equal(actual, df2)
        actual = df2 - scalar1
        tm.assert_frame_equal(actual, df1)

        actual = df1 + timedelta_NaT
        tm.assert_frame_equal(actual, dfn)
        actual = df1 - timedelta_NaT
        tm.assert_frame_equal(actual, dfn)

        # DataFrame + np.nan also raises (message differs by path)
        msg = "cannot subtract a datelike from|unsupported operand type"
        with pytest.raises(TypeError, match=msg):
            df1 + np.nan
        with pytest.raises(TypeError, match=msg):
            df1 - np.nan

        actual = df1 + NaT  # NaT is datetime, not timedelta
        tm.assert_frame_equal(actual, dfn)
        actual = df1 - NaT
        tm.assert_frame_equal(actual, dfn)
|
| 838 |
+
|
| 839 |
+
    # TODO: moved from tests.series.test_operators, needs splitting, cleanup,
    # de-duplication, box-parametrization...
    def test_operators_timedelta64(self):
        """Mixed dt64/td64 arithmetic on Series/DataFrame columns.

        Covers dt64 - dt64 -> td64, dt64 +/- td64 round-trips, scalar
        datetime/timedelta operands, and in-place element update.
        """
        # series ops
        v1 = pd.date_range("2012-1-1", periods=3, freq="D")
        v2 = pd.date_range("2012-1-2", periods=3, freq="D")
        rs = Series(v2) - Series(v1)
        # one day expressed in nanoseconds, then viewed as timedelta64[ns]
        xp = Series(1e9 * 3600 * 24, rs.index).astype("int64").astype("timedelta64[ns]")
        tm.assert_series_equal(rs, xp)
        assert rs.dtype == "timedelta64[ns]"

        df = DataFrame({"A": v1})
        td = Series([timedelta(days=i) for i in range(3)])
        assert td.dtype == "timedelta64[ns]"

        # series on the rhs
        result = df["A"] - df["A"].shift()
        assert result.dtype == "timedelta64[ns]"

        result = df["A"] + td
        assert result.dtype == "M8[ns]"

        # scalar Timestamp on rhs
        maxa = df["A"].max()
        assert isinstance(maxa, Timestamp)

        resultb = df["A"] - df["A"].max()
        assert resultb.dtype == "timedelta64[ns]"

        # timestamp on lhs
        result = resultb + df["A"]
        values = [Timestamp("20111230"), Timestamp("20120101"), Timestamp("20120103")]
        expected = Series(values, dtype="M8[ns]", name="A")
        tm.assert_series_equal(result, expected)

        # datetimes on rhs
        result = df["A"] - datetime(2001, 1, 1)
        expected = Series([timedelta(days=4017 + i) for i in range(3)], name="A")
        tm.assert_series_equal(result, expected)
        assert result.dtype == "m8[ns]"

        d = datetime(2001, 1, 1, 3, 4)
        resulta = df["A"] - d
        assert resulta.dtype == "m8[ns]"

        # roundtrip: (A - d) + d recovers A
        resultb = resulta + d
        tm.assert_series_equal(df["A"], resultb)

        # timedeltas on rhs
        td = timedelta(days=1)
        resulta = df["A"] + td
        resultb = resulta - td
        tm.assert_series_equal(resultb, df["A"])
        assert resultb.dtype == "M8[ns]"

        # roundtrip with a sub-day timedelta
        td = timedelta(minutes=5, seconds=3)
        resulta = df["A"] + td
        resultb = resulta - td
        tm.assert_series_equal(df["A"], resultb)
        assert resultb.dtype == "M8[ns]"

        # inplace element update via __iadd__ on a single position
        value = rs[2] + np.timedelta64(timedelta(minutes=5, seconds=1))
        rs[2] += np.timedelta64(timedelta(minutes=5, seconds=1))
        assert rs[2] == value
|
| 906 |
+
|
| 907 |
+
    def test_timedelta64_ops_nat(self):
        """NaT propagation through td64 Series add/sub/mul/div (GH#11349)."""
        # GH 11349
        timedelta_series = Series([NaT, Timedelta("1s")])
        nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[ns]")
        single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[ns]")

        # subtraction
        tm.assert_series_equal(timedelta_series - NaT, nat_series_dtype_timedelta)
        tm.assert_series_equal(-NaT + timedelta_series, nat_series_dtype_timedelta)

        tm.assert_series_equal(
            timedelta_series - single_nat_dtype_timedelta, nat_series_dtype_timedelta
        )
        tm.assert_series_equal(
            -single_nat_dtype_timedelta + timedelta_series, nat_series_dtype_timedelta
        )

        # addition
        tm.assert_series_equal(
            nat_series_dtype_timedelta + NaT, nat_series_dtype_timedelta
        )
        tm.assert_series_equal(
            NaT + nat_series_dtype_timedelta, nat_series_dtype_timedelta
        )

        tm.assert_series_equal(
            nat_series_dtype_timedelta + single_nat_dtype_timedelta,
            nat_series_dtype_timedelta,
        )
        tm.assert_series_equal(
            single_nat_dtype_timedelta + nat_series_dtype_timedelta,
            nat_series_dtype_timedelta,
        )

        tm.assert_series_equal(timedelta_series + NaT, nat_series_dtype_timedelta)
        tm.assert_series_equal(NaT + timedelta_series, nat_series_dtype_timedelta)

        tm.assert_series_equal(
            timedelta_series + single_nat_dtype_timedelta, nat_series_dtype_timedelta
        )
        tm.assert_series_equal(
            single_nat_dtype_timedelta + timedelta_series, nat_series_dtype_timedelta
        )

        tm.assert_series_equal(
            nat_series_dtype_timedelta + NaT, nat_series_dtype_timedelta
        )
        tm.assert_series_equal(
            NaT + nat_series_dtype_timedelta, nat_series_dtype_timedelta
        )

        tm.assert_series_equal(
            nat_series_dtype_timedelta + single_nat_dtype_timedelta,
            nat_series_dtype_timedelta,
        )
        tm.assert_series_equal(
            single_nat_dtype_timedelta + nat_series_dtype_timedelta,
            nat_series_dtype_timedelta,
        )

        # multiplication
        tm.assert_series_equal(
            nat_series_dtype_timedelta * 1.0, nat_series_dtype_timedelta
        )
        tm.assert_series_equal(
            1.0 * nat_series_dtype_timedelta, nat_series_dtype_timedelta
        )

        tm.assert_series_equal(timedelta_series * 1, timedelta_series)
        tm.assert_series_equal(1 * timedelta_series, timedelta_series)

        tm.assert_series_equal(timedelta_series * 1.5, Series([NaT, Timedelta("1.5s")]))
        tm.assert_series_equal(1.5 * timedelta_series, Series([NaT, Timedelta("1.5s")]))

        # multiplying by np.nan yields NaT, unlike add/sub which raise
        tm.assert_series_equal(timedelta_series * np.nan, nat_series_dtype_timedelta)
        tm.assert_series_equal(np.nan * timedelta_series, nat_series_dtype_timedelta)

        # division
        tm.assert_series_equal(timedelta_series / 2, Series([NaT, Timedelta("0.5s")]))
        tm.assert_series_equal(timedelta_series / 2.0, Series([NaT, Timedelta("0.5s")]))
        tm.assert_series_equal(timedelta_series / np.nan, nat_series_dtype_timedelta)
|
| 988 |
+
|
| 989 |
+
# -------------------------------------------------------------
|
| 990 |
+
# Binary operations td64 arraylike and datetime-like
|
| 991 |
+
|
| 992 |
+
@pytest.mark.parametrize("cls", [Timestamp, datetime, np.datetime64])
|
| 993 |
+
def test_td64arr_add_sub_datetimelike_scalar(
|
| 994 |
+
self, cls, box_with_array, tz_naive_fixture
|
| 995 |
+
):
|
| 996 |
+
# GH#11925, GH#29558, GH#23215
|
| 997 |
+
tz = tz_naive_fixture
|
| 998 |
+
|
| 999 |
+
dt_scalar = Timestamp("2012-01-01", tz=tz)
|
| 1000 |
+
if cls is datetime:
|
| 1001 |
+
ts = dt_scalar.to_pydatetime()
|
| 1002 |
+
elif cls is np.datetime64:
|
| 1003 |
+
if tz_naive_fixture is not None:
|
| 1004 |
+
pytest.skip(f"{cls} doesn support {tz_naive_fixture}")
|
| 1005 |
+
ts = dt_scalar.to_datetime64()
|
| 1006 |
+
else:
|
| 1007 |
+
ts = dt_scalar
|
| 1008 |
+
|
| 1009 |
+
tdi = timedelta_range("1 day", periods=3)
|
| 1010 |
+
expected = pd.date_range("2012-01-02", periods=3, tz=tz)
|
| 1011 |
+
|
| 1012 |
+
tdarr = tm.box_expected(tdi, box_with_array)
|
| 1013 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1014 |
+
|
| 1015 |
+
tm.assert_equal(ts + tdarr, expected)
|
| 1016 |
+
tm.assert_equal(tdarr + ts, expected)
|
| 1017 |
+
|
| 1018 |
+
expected2 = pd.date_range("2011-12-31", periods=3, freq="-1D", tz=tz)
|
| 1019 |
+
expected2 = tm.box_expected(expected2, box_with_array)
|
| 1020 |
+
|
| 1021 |
+
tm.assert_equal(ts - tdarr, expected2)
|
| 1022 |
+
tm.assert_equal(ts + (-tdarr), expected2)
|
| 1023 |
+
|
| 1024 |
+
msg = "cannot subtract a datelike"
|
| 1025 |
+
with pytest.raises(TypeError, match=msg):
|
| 1026 |
+
tdarr - ts
|
| 1027 |
+
|
| 1028 |
+
def test_td64arr_add_datetime64_nat(self, box_with_array):
|
| 1029 |
+
# GH#23215
|
| 1030 |
+
other = np.datetime64("NaT")
|
| 1031 |
+
|
| 1032 |
+
tdi = timedelta_range("1 day", periods=3)
|
| 1033 |
+
expected = DatetimeIndex(["NaT", "NaT", "NaT"], dtype="M8[ns]")
|
| 1034 |
+
|
| 1035 |
+
tdser = tm.box_expected(tdi, box_with_array)
|
| 1036 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1037 |
+
|
| 1038 |
+
tm.assert_equal(tdser + other, expected)
|
| 1039 |
+
tm.assert_equal(other + tdser, expected)
|
| 1040 |
+
|
| 1041 |
+
    def test_td64arr_sub_dt64_array(self, box_with_array):
        """td64 - dt64 ndarray raises; dt64 - td64 works via __rsub__."""
        dti = pd.date_range("2016-01-01", periods=3)
        tdi = TimedeltaIndex(["-1 Day"] * 3)
        dtarr = dti.values
        expected = DatetimeIndex(dtarr) - tdi

        tdi = tm.box_expected(tdi, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        msg = "cannot subtract a datelike from"
        with pytest.raises(TypeError, match=msg):
            tdi - dtarr

        # TimedeltaIndex.__rsub__
        result = dtarr - tdi
        tm.assert_equal(result, expected)
|
| 1057 |
+
|
| 1058 |
+
def test_td64arr_add_dt64_array(self, box_with_array):
|
| 1059 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
| 1060 |
+
tdi = TimedeltaIndex(["-1 Day"] * 3)
|
| 1061 |
+
dtarr = dti.values
|
| 1062 |
+
expected = DatetimeIndex(dtarr) + tdi
|
| 1063 |
+
|
| 1064 |
+
tdi = tm.box_expected(tdi, box_with_array)
|
| 1065 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1066 |
+
|
| 1067 |
+
result = tdi + dtarr
|
| 1068 |
+
tm.assert_equal(result, expected)
|
| 1069 |
+
result = dtarr + tdi
|
| 1070 |
+
tm.assert_equal(result, expected)
|
| 1071 |
+
|
| 1072 |
+
    # ------------------------------------------------------------------
    # Invalid __add__/__sub__ operations

    @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"])
    @pytest.mark.parametrize("tdi_freq", [None, "h"])
    def test_td64arr_sub_periodlike(
        self, box_with_array, box_with_array2, tdi_freq, pi_freq
    ):
        """Subtracting PeriodIndex or Period scalar from td64 raises TypeError."""
        # GH#20049 subtracting PeriodIndex should raise TypeError
        tdi = TimedeltaIndex(["1 hours", "2 hours"], freq=tdi_freq)
        dti = Timestamp("2018-03-07 17:16:40") + tdi
        pi = dti.to_period(pi_freq)
        per = pi[0]

        tdi = tm.box_expected(tdi, box_with_array)
        pi = tm.box_expected(pi, box_with_array2)
        msg = "cannot subtract|unsupported operand type"
        with pytest.raises(TypeError, match=msg):
            tdi - pi

        # GH#13078 subtraction of Period scalar not supported
        with pytest.raises(TypeError, match=msg):
            tdi - per
|
| 1095 |
+
|
| 1096 |
+
    @pytest.mark.parametrize(
        "other",
        [
            # GH#12624 for str case
            "a",
            # GH#19123
            1,
            1.5,
            np.array(2),
        ],
    )
    def test_td64arr_addsub_numeric_scalar_invalid(self, box_with_array, other):
        """Add/sub of numeric or string scalars with td64 arrays raises."""
        # vector-like others are tested in test_td64arr_add_sub_numeric_arr_invalid
        tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
        tdarr = tm.box_expected(tdser, box_with_array)

        assert_invalid_addsub_type(tdarr, other)
|
| 1113 |
+
|
| 1114 |
+
    @pytest.mark.parametrize(
        "vec",
        [
            np.array([1, 2, 3]),
            Index([1, 2, 3]),
            Series([1, 2, 3]),
            DataFrame([[1, 2, 3]]),
        ],
        ids=lambda x: type(x).__name__,
    )
    def test_td64arr_addsub_numeric_arr_invalid(
        self, box_with_array, vec, any_real_numpy_dtype
    ):
        """Add/sub of numeric array-likes (any real dtype) with td64 raises."""
        tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
        tdarr = tm.box_expected(tdser, box_with_array)

        vector = vec.astype(any_real_numpy_dtype)
        assert_invalid_addsub_type(tdarr, vector)
|
| 1132 |
+
|
| 1133 |
+
    def test_td64arr_add_sub_int(self, box_with_array, one):
        """Integer add/sub with td64 arrays raises, including in-place forms."""
        # Variants of `one` for #19012, deprecated GH#22535
        rng = timedelta_range("1 days 09:00:00", freq="h", periods=10)
        tdarr = tm.box_expected(rng, box_with_array)

        msg = "Addition/subtraction of integers"
        assert_invalid_addsub_type(tdarr, one, msg)

        # TODO: get inplace ops into assert_invalid_addsub_type
        with pytest.raises(TypeError, match=msg):
            tdarr += one
        with pytest.raises(TypeError, match=msg):
            tdarr -= one
|
| 1146 |
+
|
| 1147 |
+
    def test_td64arr_add_sub_integer_array(self, box_with_array):
        """Integer-array add/sub with td64 arrays raises (no numpy dispatch)."""
        # GH#19959, deprecated GH#22535
        # GH#22696 for DataFrame case, check that we don't dispatch to numpy
        # implementation, which treats int64 as m8[ns]
        box = box_with_array
        # pd.array boxes its "other" operand as a plain ndarray
        xbox = np.ndarray if box is pd.array else box

        rng = timedelta_range("1 days 09:00:00", freq="h", periods=3)
        tdarr = tm.box_expected(rng, box)
        other = tm.box_expected([4, 3, 2], xbox)

        msg = "Addition/subtraction of integers and integer-arrays"
        assert_invalid_addsub_type(tdarr, other, msg)
|
| 1160 |
+
|
| 1161 |
+
    def test_td64arr_addsub_integer_array_no_freq(self, box_with_array):
        """Same as the freq-carrying case: integer arrays are invalid operands."""
        # GH#19959
        box = box_with_array
        xbox = np.ndarray if box is pd.array else box

        tdi = TimedeltaIndex(["1 Day", "NaT", "3 Hours"])
        tdarr = tm.box_expected(tdi, box)
        other = tm.box_expected([14, -1, 16], xbox)

        msg = "Addition/subtraction of integers"
        assert_invalid_addsub_type(tdarr, other, msg)
|
| 1172 |
+
|
| 1173 |
+
    # ------------------------------------------------------------------
    # Operations with timedelta-like others

    def test_td64arr_add_sub_td64_array(self, box_with_array):
        """td64 box +/- td64 ndarray: x + x == 2x and x - x == 0, both orders."""
        box = box_with_array
        dti = pd.date_range("2016-01-01", periods=3)
        tdi = dti - dti.shift(1)
        tdarr = tdi.values

        expected = 2 * tdi
        tdi = tm.box_expected(tdi, box)
        expected = tm.box_expected(expected, box)

        result = tdi + tdarr
        tm.assert_equal(result, expected)
        result = tdarr + tdi
        tm.assert_equal(result, expected)

        expected_sub = 0 * tdi
        result = tdi - tdarr
        tm.assert_equal(result, expected_sub)
        result = tdarr - tdi
        tm.assert_equal(result, expected_sub)
|
| 1196 |
+
|
| 1197 |
+
    def test_td64arr_add_sub_tdi(self, box_with_array, names):
        """td64 box +/- TimedeltaIndex preserves td64 dtype and result name."""
        # GH#17250 make sure result dtype is correct
        # GH#19043 make sure names are propagated correctly
        box = box_with_array
        exname = get_expected_name(box, names)

        tdi = TimedeltaIndex(["0 days", "1 day"], name=names[1])
        # unwrap to ndarray for boxes that don't carry names
        tdi = np.array(tdi) if box in [tm.to_array, pd.array] else tdi
        ser = Series([Timedelta(hours=3), Timedelta(hours=4)], name=names[0])
        expected = Series([Timedelta(hours=3), Timedelta(days=1, hours=4)], name=exname)

        ser = tm.box_expected(ser, box)
        expected = tm.box_expected(expected, box)

        result = tdi + ser
        tm.assert_equal(result, expected)
        assert_dtype(result, "timedelta64[ns]")

        result = ser + tdi
        tm.assert_equal(result, expected)
        assert_dtype(result, "timedelta64[ns]")

        expected = Series(
            [Timedelta(hours=-3), Timedelta(days=1, hours=-4)], name=exname
        )
        expected = tm.box_expected(expected, box)

        result = tdi - ser
        tm.assert_equal(result, expected)
        assert_dtype(result, "timedelta64[ns]")

        # ser - tdi is the negation of tdi - ser
        result = ser - tdi
        tm.assert_equal(result, -expected)
        assert_dtype(result, "timedelta64[ns]")
|
| 1231 |
+
|
| 1232 |
+
@pytest.mark.parametrize("tdnat", [np.timedelta64("NaT"), NaT])
|
| 1233 |
+
def test_td64arr_add_sub_td64_nat(self, box_with_array, tdnat):
|
| 1234 |
+
# GH#18808, GH#23320 special handling for timedelta64("NaT")
|
| 1235 |
+
box = box_with_array
|
| 1236 |
+
tdi = TimedeltaIndex([NaT, Timedelta("1s")])
|
| 1237 |
+
expected = TimedeltaIndex(["NaT"] * 2)
|
| 1238 |
+
|
| 1239 |
+
obj = tm.box_expected(tdi, box)
|
| 1240 |
+
expected = tm.box_expected(expected, box)
|
| 1241 |
+
|
| 1242 |
+
result = obj + tdnat
|
| 1243 |
+
tm.assert_equal(result, expected)
|
| 1244 |
+
result = tdnat + obj
|
| 1245 |
+
tm.assert_equal(result, expected)
|
| 1246 |
+
result = obj - tdnat
|
| 1247 |
+
tm.assert_equal(result, expected)
|
| 1248 |
+
result = tdnat - obj
|
| 1249 |
+
tm.assert_equal(result, expected)
|
| 1250 |
+
|
| 1251 |
+
def test_td64arr_add_timedeltalike(self, two_hours, box_with_array):
|
| 1252 |
+
# only test adding/sub offsets as + is now numeric
|
| 1253 |
+
# GH#10699 for Tick cases
|
| 1254 |
+
box = box_with_array
|
| 1255 |
+
rng = timedelta_range("1 days", "10 days")
|
| 1256 |
+
expected = timedelta_range("1 days 02:00:00", "10 days 02:00:00", freq="D")
|
| 1257 |
+
rng = tm.box_expected(rng, box)
|
| 1258 |
+
expected = tm.box_expected(expected, box)
|
| 1259 |
+
|
| 1260 |
+
result = rng + two_hours
|
| 1261 |
+
tm.assert_equal(result, expected)
|
| 1262 |
+
|
| 1263 |
+
result = two_hours + rng
|
| 1264 |
+
tm.assert_equal(result, expected)
|
| 1265 |
+
|
| 1266 |
+
    def test_td64arr_sub_timedeltalike(self, two_hours, box_with_array):
        """Subtracting a timedelta-like shifts back; rsub negates the result."""
        # only test adding/sub offsets as - is now numeric
        # GH#10699 for Tick cases
        box = box_with_array
        rng = timedelta_range("1 days", "10 days")
        expected = timedelta_range("0 days 22:00:00", "9 days 22:00:00")

        rng = tm.box_expected(rng, box)
        expected = tm.box_expected(expected, box)

        result = rng - two_hours
        tm.assert_equal(result, expected)

        result = two_hours - rng
        tm.assert_equal(result, -expected)
|
| 1281 |
+
|
| 1282 |
+
    # ------------------------------------------------------------------
    # __add__/__sub__ with DateOffsets and arrays of DateOffsets

    def test_td64arr_add_sub_offset_index(self, names, box_with_array):
        """Elementwise add/sub of an Index of DateOffsets.

        Result is object dtype and each op emits a PerformanceWarning.
        """
        # GH#18849, GH#19744
        box = box_with_array
        exname = get_expected_name(box, names)

        tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"], name=names[0])
        other = Index([offsets.Hour(n=1), offsets.Minute(n=-2)], name=names[1])
        other = np.array(other) if box in [tm.to_array, pd.array] else other

        # expected values computed elementwise before boxing
        expected = TimedeltaIndex(
            [tdi[n] + other[n] for n in range(len(tdi))], freq="infer", name=exname
        )
        expected_sub = TimedeltaIndex(
            [tdi[n] - other[n] for n in range(len(tdi))], freq="infer", name=exname
        )

        tdi = tm.box_expected(tdi, box)
        expected = tm.box_expected(expected, box).astype(object, copy=False)
        expected_sub = tm.box_expected(expected_sub, box).astype(object, copy=False)

        with tm.assert_produces_warning(PerformanceWarning):
            res = tdi + other
        tm.assert_equal(res, expected)

        with tm.assert_produces_warning(PerformanceWarning):
            res2 = other + tdi
        tm.assert_equal(res2, expected)

        with tm.assert_produces_warning(PerformanceWarning):
            res_sub = tdi - other
        tm.assert_equal(res_sub, expected_sub)
|
| 1316 |
+
|
| 1317 |
+
    def test_td64arr_add_sub_offset_array(self, box_with_array):
        """Same as the Index case but with a raw ndarray of DateOffsets."""
        # GH#18849, GH#18824
        box = box_with_array
        tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"])
        other = np.array([offsets.Hour(n=1), offsets.Minute(n=-2)])

        expected = TimedeltaIndex(
            [tdi[n] + other[n] for n in range(len(tdi))], freq="infer"
        )
        expected_sub = TimedeltaIndex(
            [tdi[n] - other[n] for n in range(len(tdi))], freq="infer"
        )

        tdi = tm.box_expected(tdi, box)
        expected = tm.box_expected(expected, box).astype(object)

        with tm.assert_produces_warning(PerformanceWarning):
            res = tdi + other
        tm.assert_equal(res, expected)

        with tm.assert_produces_warning(PerformanceWarning):
            res2 = other + tdi
        tm.assert_equal(res2, expected)

        expected_sub = tm.box_expected(expected_sub, box_with_array).astype(object)
        with tm.assert_produces_warning(PerformanceWarning):
            res_sub = tdi - other
        tm.assert_equal(res_sub, expected_sub)
|
| 1345 |
+
|
| 1346 |
+
    def test_td64arr_with_offset_series(self, names, box_with_array):
        """td64 box +/- a Series of DateOffsets -> object Series (warns)."""
        # GH#18849
        box = box_with_array
        # a Series "other" upcasts Index/array boxes to a Series result
        box2 = Series if box in [Index, tm.to_array, pd.array] else box
        exname = get_expected_name(box, names)

        tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"], name=names[0])
        other = Series([offsets.Hour(n=1), offsets.Minute(n=-2)], name=names[1])

        expected_add = Series(
            [tdi[n] + other[n] for n in range(len(tdi))], name=exname, dtype=object
        )
        obj = tm.box_expected(tdi, box)
        expected_add = tm.box_expected(expected_add, box2).astype(object)

        with tm.assert_produces_warning(PerformanceWarning):
            res = obj + other
        tm.assert_equal(res, expected_add)

        with tm.assert_produces_warning(PerformanceWarning):
            res2 = other + obj
        tm.assert_equal(res2, expected_add)

        expected_sub = Series(
            [tdi[n] - other[n] for n in range(len(tdi))], name=exname, dtype=object
        )
        expected_sub = tm.box_expected(expected_sub, box2).astype(object)

        with tm.assert_produces_warning(PerformanceWarning):
            res3 = obj - other
        tm.assert_equal(res3, expected_sub)
|
| 1377 |
+
|
| 1378 |
+
    @pytest.mark.parametrize("obox", [np.array, Index, Series])
    def test_td64arr_addsub_anchored_offset_arraylike(self, obox, box_with_array):
        """Anchored offsets (e.g. MonthEnd) warn and then raise on add/sub."""
        # GH#18824
        tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"])
        tdi = tm.box_expected(tdi, box_with_array)

        anchored = obox([offsets.MonthEnd(), offsets.Day(n=2)])

        # addition/subtraction ops with anchored offsets should issue
        # a PerformanceWarning and _then_ raise a TypeError.
        msg = "has incorrect type|cannot add the type MonthEnd"
        with pytest.raises(TypeError, match=msg):
            with tm.assert_produces_warning(PerformanceWarning):
                tdi + anchored
        with pytest.raises(TypeError, match=msg):
            with tm.assert_produces_warning(PerformanceWarning):
                anchored + tdi
        with pytest.raises(TypeError, match=msg):
            with tm.assert_produces_warning(PerformanceWarning):
                tdi - anchored
        with pytest.raises(TypeError, match=msg):
            with tm.assert_produces_warning(PerformanceWarning):
                anchored - tdi
|
| 1401 |
+
|
| 1402 |
+
    # ------------------------------------------------------------------
    # Unsorted

    def test_td64arr_add_sub_object_array(self, box_with_array):
        """Mixed object array (Timedelta, DateOffset, Timestamp) add/sub.

        Addition and rsub succeed elementwise (object result, warns);
        td64 - Timestamp inside the object array raises.
        """
        box = box_with_array
        xbox = np.ndarray if box is pd.array else box

        tdi = timedelta_range("1 day", periods=3, freq="D")
        tdarr = tm.box_expected(tdi, box)

        other = np.array([Timedelta(days=1), offsets.Day(2), Timestamp("2000-01-04")])

        with tm.assert_produces_warning(PerformanceWarning):
            result = tdarr + other

        expected = Index(
            [Timedelta(days=2), Timedelta(days=4), Timestamp("2000-01-07")]
        )
        expected = tm.box_expected(expected, xbox).astype(object)
        tm.assert_equal(result, expected)

        # td64 - Timestamp element is invalid
        msg = "unsupported operand type|cannot subtract a datelike"
        with pytest.raises(TypeError, match=msg):
            with tm.assert_produces_warning(PerformanceWarning):
                tdarr - other

        with tm.assert_produces_warning(PerformanceWarning):
            result = other - tdarr

        expected = Index([Timedelta(0), Timedelta(0), Timestamp("2000-01-01")])
        expected = tm.box_expected(expected, xbox).astype(object)
        tm.assert_equal(result, expected)
|
| 1434 |
+
|
| 1435 |
+
|
| 1436 |
+
class TestTimedeltaArraylikeMulDivOps:
    # Tests for timedelta64[ns]
    # __mul__, __rmul__, __div__, __rdiv__, __floordiv__, __rfloordiv__

    # ------------------------------------------------------------------
    # Multiplication
    # organized with scalar others first, then array-like

    def test_td64arr_mul_int(self, box_with_array):
        """Multiplying by integer 1 is an identity, on either side."""
        boxed = tm.box_expected(
            TimedeltaIndex(np.arange(5, dtype="int64")), box_with_array
        )
        tm.assert_equal(boxed * 1, boxed)
        tm.assert_equal(1 * boxed, boxed)
|
| 1453 |
+
|
| 1454 |
+
    def test_td64arr_mul_tdlike_scalar_raises(self, two_hours, box_with_array):
        """td64 array * timedelta-like scalar raises TypeError (message varies)."""
        rng = timedelta_range("1 days", "10 days", name="foo")
        rng = tm.box_expected(rng, box_with_array)
        # the message depends on which timedelta-like flavor `two_hours` is
        msg = "|".join(
            [
                "argument must be an integer",
                "cannot use operands with types dtype",
                "Cannot multiply with",
            ]
        )
        with pytest.raises(TypeError, match=msg):
            rng * two_hours
|
| 1466 |
+
|
| 1467 |
+
def test_tdi_mul_int_array_zerodim(self, box_with_array):
|
| 1468 |
+
rng5 = np.arange(5, dtype="int64")
|
| 1469 |
+
idx = TimedeltaIndex(rng5)
|
| 1470 |
+
expected = TimedeltaIndex(rng5 * 5)
|
| 1471 |
+
|
| 1472 |
+
idx = tm.box_expected(idx, box_with_array)
|
| 1473 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1474 |
+
|
| 1475 |
+
result = idx * np.array(5, dtype="int64")
|
| 1476 |
+
tm.assert_equal(result, expected)
|
| 1477 |
+
|
| 1478 |
+
    def test_tdi_mul_int_array(self, box_with_array):
        """Elementwise multiplication by an int64 ndarray of matching length."""
        rng5 = np.arange(5, dtype="int64")
        idx = TimedeltaIndex(rng5)
        expected = TimedeltaIndex(rng5**2)

        idx = tm.box_expected(idx, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = idx * rng5
        tm.assert_equal(result, expected)
|
| 1488 |
+
|
| 1489 |
+
    def test_tdi_mul_int_series(self, box_with_array):
        """td64 box * int Series upcasts Index-like boxes to a Series result."""
        box = box_with_array
        xbox = Series if box in [Index, tm.to_array, pd.array] else box

        idx = TimedeltaIndex(np.arange(5, dtype="int64"))
        expected = TimedeltaIndex(np.arange(5, dtype="int64") ** 2)

        idx = tm.box_expected(idx, box)
        expected = tm.box_expected(expected, xbox)

        result = idx * Series(np.arange(5, dtype="int64"))
        tm.assert_equal(result, expected)
|
| 1501 |
+
|
| 1502 |
+
def test_tdi_mul_float_series(self, box_with_array):
    """Multiplying a td64 array by a float Series scales elementwise and
    returns the Series box for index-like inputs."""
    box = box_with_array
    xbox = Series if box in [Index, tm.to_array, pd.array] else box

    idx = TimedeltaIndex(np.arange(5, dtype="int64"))
    idx = tm.box_expected(idx, box)

    rng5f = np.arange(5, dtype="float64")
    expected = TimedeltaIndex(rng5f * (rng5f + 1.0))
    expected = tm.box_expected(expected, xbox)

    result = idx * Series(rng5f + 1.0)
    tm.assert_equal(result, expected)
# TODO: Put Series/DataFrame in others?
@pytest.mark.parametrize(
    "other",
    [
        np.arange(1, 11),
        Index(np.arange(1, 11), np.int64),
        Index(range(1, 11), np.uint64),
        Index(range(1, 11), np.float64),
        pd.RangeIndex(1, 11),
    ],
    ids=lambda x: type(x).__name__,
)
def test_tdi_rmul_arraylike(self, other, box_with_array):
    """Reversed multiplication (array-like * td64 array) commutes with the
    forward op and yields the upcast box."""
    box = box_with_array

    tdi = TimedeltaIndex(["1 Day"] * 10)
    # Drop the freq so the expected matches the (freq-less) op result.
    expected = timedelta_range("1 days", "10 days")._with_freq(None)

    tdi = tm.box_expected(tdi, box)
    xbox = get_upcast_box(tdi, other)

    expected = tm.box_expected(expected, xbox)

    result = other * tdi
    tm.assert_equal(result, expected)
    commute = tdi * other
    tm.assert_equal(commute, expected)
# ------------------------------------------------------------------
|
| 1545 |
+
# __div__, __rdiv__
|
| 1546 |
+
|
| 1547 |
+
def test_td64arr_div_nat_invalid(self, box_with_array):
    """Division involving pd.NaT or np.datetime64('NaT') must raise
    TypeError in both operand orders."""
    # don't allow division by NaT (maybe could in the future)
    rng = timedelta_range("1 days", "10 days", name="foo")
    rng = tm.box_expected(rng, box_with_array)

    with pytest.raises(TypeError, match="unsupported operand type"):
        rng / NaT
    with pytest.raises(TypeError, match="Cannot divide NaTType by"):
        NaT / rng

    dt64nat = np.datetime64("NaT", "ns")
    msg = "|".join(
        [
            # 'divide' on npdev as of 2021-12-18
            "ufunc '(true_divide|divide)' cannot use operands",
            "cannot perform __r?truediv__",
            "Cannot divide datetime64 by TimedeltaArray",
        ]
    )
    with pytest.raises(TypeError, match=msg):
        rng / dt64nat
    with pytest.raises(TypeError, match=msg):
        dt64nat / rng
def test_td64arr_div_td64nat(self, box_with_array):
|
| 1572 |
+
# GH#23829
|
| 1573 |
+
box = box_with_array
|
| 1574 |
+
xbox = np.ndarray if box is pd.array else box
|
| 1575 |
+
|
| 1576 |
+
rng = timedelta_range("1 days", "10 days")
|
| 1577 |
+
rng = tm.box_expected(rng, box)
|
| 1578 |
+
|
| 1579 |
+
other = np.timedelta64("NaT")
|
| 1580 |
+
|
| 1581 |
+
expected = np.array([np.nan] * 10)
|
| 1582 |
+
expected = tm.box_expected(expected, xbox)
|
| 1583 |
+
|
| 1584 |
+
result = rng / other
|
| 1585 |
+
tm.assert_equal(result, expected)
|
| 1586 |
+
|
| 1587 |
+
result = other / rng
|
| 1588 |
+
tm.assert_equal(result, expected)
|
| 1589 |
+
|
| 1590 |
+
def test_td64arr_div_int(self, box_with_array):
    """td64 / int scales elementwise; int / td64 is undefined and raises."""
    idx = TimedeltaIndex(np.arange(5, dtype="int64"))
    idx = tm.box_expected(idx, box_with_array)

    # Division by 1 is the identity.
    result = idx / 1
    tm.assert_equal(result, idx)

    with pytest.raises(TypeError, match="Cannot divide"):
        # GH#23829
        1 / idx
def test_td64arr_div_tdlike_scalar(self, two_hours, box_with_array):
|
| 1602 |
+
# GH#20088, GH#22163 ensure DataFrame returns correct dtype
|
| 1603 |
+
box = box_with_array
|
| 1604 |
+
xbox = np.ndarray if box is pd.array else box
|
| 1605 |
+
|
| 1606 |
+
rng = timedelta_range("1 days", "10 days", name="foo")
|
| 1607 |
+
expected = Index((np.arange(10) + 1) * 12, dtype=np.float64, name="foo")
|
| 1608 |
+
|
| 1609 |
+
rng = tm.box_expected(rng, box)
|
| 1610 |
+
expected = tm.box_expected(expected, xbox)
|
| 1611 |
+
|
| 1612 |
+
result = rng / two_hours
|
| 1613 |
+
tm.assert_equal(result, expected)
|
| 1614 |
+
|
| 1615 |
+
result = two_hours / rng
|
| 1616 |
+
expected = 1 / expected
|
| 1617 |
+
tm.assert_equal(result, expected)
|
| 1618 |
+
|
| 1619 |
+
@pytest.mark.parametrize("m", [1, 3, 10])
@pytest.mark.parametrize("unit", ["D", "h", "m", "s", "ms", "us", "ns"])
def test_td64arr_div_td64_scalar(self, m, unit, box_with_array):
    """Division by np.timedelta64 scalars of every unit, forward and
    reversed, matches element-by-element scalar division (NaT -> NaN)."""
    box = box_with_array
    xbox = np.ndarray if box is pd.array else box

    ser = Series([Timedelta(days=59)] * 3)
    ser[2] = np.nan
    # Keep the un-boxed Series to compute expectations element-by-element.
    flat = ser
    ser = tm.box_expected(ser, box)

    # op
    expected = Series([x / np.timedelta64(m, unit) for x in flat])
    expected = tm.box_expected(expected, xbox)
    result = ser / np.timedelta64(m, unit)
    tm.assert_equal(result, expected)

    # reverse op
    expected = Series([Timedelta(np.timedelta64(m, unit)) / x for x in flat])
    expected = tm.box_expected(expected, xbox)
    result = np.timedelta64(m, unit) / ser
    tm.assert_equal(result, expected)
def test_td64arr_div_tdlike_scalar_with_nat(self, two_hours, box_with_array):
    """NaT entries propagate as NaN when dividing by a timedelta scalar,
    in both operand orders."""
    box = box_with_array
    xbox = np.ndarray if box is pd.array else box

    rng = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
    expected = Index([12, np.nan, 24], dtype=np.float64, name="foo")

    rng = tm.box_expected(rng, box)
    expected = tm.box_expected(expected, xbox)

    result = rng / two_hours
    tm.assert_equal(result, expected)

    result = two_hours / rng
    expected = 1 / expected
    tm.assert_equal(result, expected)
def test_td64arr_div_td64_ndarray(self, box_with_array):
    """GH#22631: division by a timedelta64 ndarray, in boxed, object-dtype,
    and list form, forward and reversed."""
    # GH#22631
    box = box_with_array
    xbox = np.ndarray if box is pd.array else box

    rng = TimedeltaIndex(["1 days", NaT, "2 days"])
    expected = Index([12, np.nan, 24], dtype=np.float64)

    rng = tm.box_expected(rng, box)
    expected = tm.box_expected(expected, xbox)

    other = np.array([2, 4, 2], dtype="m8[h]")
    result = rng / other
    tm.assert_equal(result, expected)

    result = rng / tm.box_expected(other, box)
    tm.assert_equal(result, expected)

    # object-dtype operand produces an object-dtype result.
    result = rng / other.astype(object)
    tm.assert_equal(result, expected.astype(object))

    result = rng / list(other)
    tm.assert_equal(result, expected)

    # reversed op
    expected = 1 / expected
    result = other / rng
    tm.assert_equal(result, expected)

    result = tm.box_expected(other, box) / rng
    tm.assert_equal(result, expected)

    result = other.astype(object) / rng
    tm.assert_equal(result, expected)

    result = list(other) / rng
    tm.assert_equal(result, expected)
def test_tdarr_div_length_mismatch(self, box_with_array):
    """Division with a length-mismatched sequence raises ValueError
    regardless of operand order or container type."""
    rng = TimedeltaIndex(["1 days", NaT, "2 days"])
    mismatched = [1, 2, 3, 4]

    rng = tm.box_expected(rng, box_with_array)
    msg = "Cannot divide vectors|Unable to coerce to Series"
    for obj in [mismatched, mismatched[:2]]:
        # one shorter, one longer
        for other in [obj, np.array(obj), Index(obj)]:
            with pytest.raises(ValueError, match=msg):
                rng / other
            with pytest.raises(ValueError, match=msg):
                other / rng
def test_td64_div_object_mixed_result(self, box_with_array):
    """Mixed object-dtype division keeps np.timedelta64('NaT') (not pd.NaT)
    in the result, for both truediv and floordiv."""
    # Case where having a NaT in the result instead of timedelta64("NaT")
    # is misleading
    orig = timedelta_range("1 Day", periods=3).insert(1, NaT)
    tdi = tm.box_expected(orig, box_with_array, transpose=False)

    other = np.array([orig[0], 1.5, 2.0, orig[2]], dtype=object)
    other = tm.box_expected(other, box_with_array, transpose=False)

    res = tdi / other

    expected = Index([1.0, np.timedelta64("NaT", "ns"), orig[0], 1.5], dtype=object)
    expected = tm.box_expected(expected, box_with_array, transpose=False)
    if isinstance(expected, NumpyExtensionArray):
        expected = expected.to_numpy()
    tm.assert_equal(res, expected)
    if box_with_array is DataFrame:
        # We have a np.timedelta64(NaT), not pd.NaT
        assert isinstance(res.iloc[1, 0], np.timedelta64)

    res = tdi // other

    expected = Index([1, np.timedelta64("NaT", "ns"), orig[0], 1], dtype=object)
    expected = tm.box_expected(expected, box_with_array, transpose=False)
    if isinstance(expected, NumpyExtensionArray):
        expected = expected.to_numpy()
    tm.assert_equal(res, expected)
    if box_with_array is DataFrame:
        # We have a np.timedelta64(NaT), not pd.NaT
        assert isinstance(res.iloc[1, 0], np.timedelta64)
# ------------------------------------------------------------------
|
| 1743 |
+
# __floordiv__, __rfloordiv__
|
| 1744 |
+
|
| 1745 |
+
def test_td64arr_floordiv_td64arr_with_nat(
    self, box_with_array, using_array_manager
):
    """GH#35529: td64 // td64 with a NaT on the right yields NaN there and
    float elsewhere; also exercises the __rfloordiv__ path."""
    # GH#35529
    box = box_with_array
    xbox = np.ndarray if box is pd.array else box

    left = Series([1000, 222330, 30], dtype="timedelta64[ns]")
    right = Series([1000, 222330, None], dtype="timedelta64[ns]")

    left = tm.box_expected(left, box)
    right = tm.box_expected(right, box)

    expected = np.array([1.0, 1.0, np.nan], dtype=np.float64)
    expected = tm.box_expected(expected, xbox)
    if box is DataFrame and using_array_manager:
        # INFO(ArrayManager) floordiv returns integer, and ArrayManager
        # performs ops column-wise and thus preserves int64 dtype for
        # columns without missing values
        expected[[0, 1]] = expected[[0, 1]].astype("int64")

    with tm.maybe_produces_warning(
        RuntimeWarning, box is pd.array, check_stacklevel=False
    ):
        result = left // right

    tm.assert_equal(result, expected)

    # case that goes through __rfloordiv__ with arraylike
    with tm.maybe_produces_warning(
        RuntimeWarning, box is pd.array, check_stacklevel=False
    ):
        result = np.asarray(left) // right
    tm.assert_equal(result, expected)
@pytest.mark.filterwarnings("ignore:invalid value encountered:RuntimeWarning")
def test_td64arr_floordiv_tdscalar(self, box_with_array, scalar_td):
    """GH#18831, GH#19125: floor division by a timedelta scalar, both
    orders, including the explicit __rfloordiv__ call."""
    # GH#18831, GH#19125
    box = box_with_array
    xbox = np.ndarray if box is pd.array else box
    td = Timedelta("5m3s")  # i.e. (scalar_td - 1sec) / 2

    td1 = Series([td, td, NaT], dtype="m8[ns]")
    td1 = tm.box_expected(td1, box, transpose=False)

    expected = Series([0, 0, np.nan])
    expected = tm.box_expected(expected, xbox, transpose=False)

    result = td1 // scalar_td
    tm.assert_equal(result, expected)

    # Reversed op
    expected = Series([2, 2, np.nan])
    expected = tm.box_expected(expected, xbox, transpose=False)

    result = scalar_td // td1
    tm.assert_equal(result, expected)

    # same thing but let's be explicit about calling __rfloordiv__
    result = td1.__rfloordiv__(scalar_td)
    tm.assert_equal(result, expected)
def test_td64arr_floordiv_int(self, box_with_array):
    """td64 // int works elementwise; int // td64 is undefined and raises."""
    idx = TimedeltaIndex(np.arange(5, dtype="int64"))
    idx = tm.box_expected(idx, box_with_array)
    # Floor division by 1 is the identity.
    result = idx // 1
    tm.assert_equal(result, idx)

    pattern = "floor_divide cannot use operands|Cannot divide int by Timedelta*"
    with pytest.raises(TypeError, match=pattern):
        1 // idx
# ------------------------------------------------------------------
|
| 1818 |
+
# mod, divmod
|
| 1819 |
+
# TODO: operations with timedelta-like arrays, numeric arrays,
|
| 1820 |
+
# reversed ops
|
| 1821 |
+
|
| 1822 |
+
def test_td64arr_mod_tdscalar(self, box_with_array, three_days):
    """mod and divmod of a td64 array by a 3-day timedelta-like scalar."""
    tdi = timedelta_range("1 Day", "9 days")
    tdarr = tm.box_expected(tdi, box_with_array)

    # 1..9 days mod 3 days cycles through 1, 2, 0.
    expected = TimedeltaIndex(["1 Day", "2 Days", "0 Days"] * 3)
    expected = tm.box_expected(expected, box_with_array)

    result = tdarr % three_days
    tm.assert_equal(result, expected)

    warn = None
    if box_with_array is DataFrame and isinstance(three_days, pd.DateOffset):
        warn = PerformanceWarning
        # TODO: making expected be object here a result of DataFrame.__divmod__
        # being defined in a naive way that does not dispatch to the underlying
        # array's __divmod__
        expected = expected.astype(object)

    with tm.assert_produces_warning(warn):
        result = divmod(tdarr, three_days)

    tm.assert_equal(result[1], expected)
    tm.assert_equal(result[0], tdarr // three_days)
def test_td64arr_mod_int(self, box_with_array):
    """mod/divmod by an int works; int % td64 is undefined and raises."""
    tdi = timedelta_range("1 ns", "10 ns", periods=10)
    tdarr = tm.box_expected(tdi, box_with_array)

    # 1..10 ns mod 2 alternates 1 ns, 0 ns.
    expected = TimedeltaIndex(["1 ns", "0 ns"] * 5)
    expected = tm.box_expected(expected, box_with_array)

    result = tdarr % 2
    tm.assert_equal(result, expected)

    msg = "Cannot divide int by"
    with pytest.raises(TypeError, match=msg):
        2 % tdarr

    result = divmod(tdarr, 2)
    tm.assert_equal(result[1], expected)
    tm.assert_equal(result[0], tdarr // 2)
def test_td64arr_rmod_tdscalar(self, box_with_array, three_days):
|
| 1865 |
+
tdi = timedelta_range("1 Day", "9 days")
|
| 1866 |
+
tdarr = tm.box_expected(tdi, box_with_array)
|
| 1867 |
+
|
| 1868 |
+
expected = ["0 Days", "1 Day", "0 Days"] + ["3 Days"] * 6
|
| 1869 |
+
expected = TimedeltaIndex(expected)
|
| 1870 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1871 |
+
|
| 1872 |
+
result = three_days % tdarr
|
| 1873 |
+
tm.assert_equal(result, expected)
|
| 1874 |
+
|
| 1875 |
+
result = divmod(three_days, tdarr)
|
| 1876 |
+
tm.assert_equal(result[1], expected)
|
| 1877 |
+
tm.assert_equal(result[0], three_days // tdarr)
|
| 1878 |
+
|
| 1879 |
+
# ------------------------------------------------------------------
|
| 1880 |
+
# Operations with invalid others
|
| 1881 |
+
|
| 1882 |
+
def test_td64arr_mul_tdscalar_invalid(self, box_with_array, scalar_td):
    """td64 * timedelta-scalar is undefined and raises TypeError in both
    operand orders."""
    td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
    td1.iloc[2] = np.nan

    td1 = tm.box_expected(td1, box_with_array)

    # check that we are getting a TypeError
    # with 'operate' (from core/ops.py) for the ops that are not
    # defined
    pattern = "operate|unsupported|cannot|not supported"
    with pytest.raises(TypeError, match=pattern):
        td1 * scalar_td
    with pytest.raises(TypeError, match=pattern):
        scalar_td * td1
def test_td64arr_mul_too_short_raises(self, box_with_array):
    """Multiplication with a mismatched-length operand raises: TypeError
    for a td64 slice (dtype rejected first or together with length),
    ValueError for a short numeric ndarray."""
    idx = TimedeltaIndex(np.arange(5, dtype="int64"))
    idx = tm.box_expected(idx, box_with_array)
    msg = "|".join(
        [
            "cannot use operands with types dtype",
            "Cannot multiply with unequal lengths",
            "Unable to coerce to Series",
        ]
    )
    with pytest.raises(TypeError, match=msg):
        # length check before dtype check
        idx * idx[:3]
    with pytest.raises(ValueError, match=msg):
        idx * np.array([1, 2])
def test_td64arr_mul_td64arr_raises(self, box_with_array):
    """td64 * td64 is undefined and must raise TypeError."""
    idx = TimedeltaIndex(np.arange(5, dtype="int64"))
    idx = tm.box_expected(idx, box_with_array)
    msg = "cannot use operands with types dtype"
    with pytest.raises(TypeError, match=msg):
        idx * idx
# ------------------------------------------------------------------
|
| 1921 |
+
# Operations with numeric others
|
| 1922 |
+
|
| 1923 |
+
def test_td64arr_mul_numeric_scalar(self, box_with_array, one):
|
| 1924 |
+
# GH#4521
|
| 1925 |
+
# divide/multiply by integers
|
| 1926 |
+
tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
|
| 1927 |
+
expected = Series(["-59 Days", "-59 Days", "NaT"], dtype="timedelta64[ns]")
|
| 1928 |
+
|
| 1929 |
+
tdser = tm.box_expected(tdser, box_with_array)
|
| 1930 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1931 |
+
|
| 1932 |
+
result = tdser * (-one)
|
| 1933 |
+
tm.assert_equal(result, expected)
|
| 1934 |
+
result = (-one) * tdser
|
| 1935 |
+
tm.assert_equal(result, expected)
|
| 1936 |
+
|
| 1937 |
+
expected = Series(["118 Days", "118 Days", "NaT"], dtype="timedelta64[ns]")
|
| 1938 |
+
expected = tm.box_expected(expected, box_with_array)
|
| 1939 |
+
|
| 1940 |
+
result = tdser * (2 * one)
|
| 1941 |
+
tm.assert_equal(result, expected)
|
| 1942 |
+
result = (2 * one) * tdser
|
| 1943 |
+
tm.assert_equal(result, expected)
|
| 1944 |
+
|
| 1945 |
+
@pytest.mark.parametrize("two", [2, 2.0, np.array(2), np.array(2.0)])
def test_td64arr_div_numeric_scalar(self, box_with_array, two):
    """GH#4521: td64 / numeric scalar halves the timedeltas; the reversed
    op is undefined and raises."""
    # GH#4521
    # divide/multiply by integers
    tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
    expected = Series(["29.5D", "29.5D", "NaT"], dtype="timedelta64[ns]")

    tdser = tm.box_expected(tdser, box_with_array)
    expected = tm.box_expected(expected, box_with_array)

    result = tdser / two
    tm.assert_equal(result, expected)

    with pytest.raises(TypeError, match="Cannot divide"):
        two / tdser
@pytest.mark.parametrize("two", [2, 2.0, np.array(2), np.array(2.0)])
def test_td64arr_floordiv_numeric_scalar(self, box_with_array, two):
    """td64 // numeric scalar works elementwise (59 days // 2 == 29.5 days
    exactly, since floor happens at nanosecond resolution); reversed raises."""
    tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
    expected = Series(["29.5D", "29.5D", "NaT"], dtype="timedelta64[ns]")

    tdser = tm.box_expected(tdser, box_with_array)
    expected = tm.box_expected(expected, box_with_array)

    result = tdser // two
    tm.assert_equal(result, expected)

    with pytest.raises(TypeError, match="Cannot divide"):
        two // tdser
@pytest.mark.parametrize(
    "vector",
    [np.array([20, 30, 40]), Index([20, 30, 40]), Series([20, 30, 40])],
    ids=lambda x: type(x).__name__,
)
def test_td64arr_rmul_numeric_array(
    self,
    box_with_array,
    vector,
    any_real_numpy_dtype,
):
    """GH#4521: multiplication by a numeric vector of any real dtype
    commutes, with the result in the upcast box."""
    # GH#4521
    # divide/multiply by integers

    tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
    vector = vector.astype(any_real_numpy_dtype)

    expected = Series(["1180 Days", "1770 Days", "NaT"], dtype="timedelta64[ns]")

    tdser = tm.box_expected(tdser, box_with_array)
    xbox = get_upcast_box(tdser, vector)

    expected = tm.box_expected(expected, xbox)

    result = tdser * vector
    tm.assert_equal(result, expected)

    result = vector * tdser
    tm.assert_equal(result, expected)
@pytest.mark.parametrize(
    "vector",
    [np.array([20, 30, 40]), Index([20, 30, 40]), Series([20, 30, 40])],
    ids=lambda x: type(x).__name__,
)
def test_td64arr_div_numeric_array(
    self, box_with_array, vector, any_real_numpy_dtype
):
    """GH#4521: td64 / numeric vector divides elementwise; reversed
    division raises; object-dtype divisor gives an object-dtype result
    containing np.timedelta64('NaT') rather than pd.NaT."""
    # GH#4521
    # divide/multiply by integers

    tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
    vector = vector.astype(any_real_numpy_dtype)

    expected = Series(["2.95D", "1D 23h 12m", "NaT"], dtype="timedelta64[ns]")

    tdser = tm.box_expected(tdser, box_with_array)
    xbox = get_upcast_box(tdser, vector)
    expected = tm.box_expected(expected, xbox)

    result = tdser / vector
    tm.assert_equal(result, expected)

    pattern = "|".join(
        [
            "true_divide'? cannot use operands",
            "cannot perform __div__",
            "cannot perform __truediv__",
            "unsupported operand",
            "Cannot divide",
            "ufunc 'divide' cannot use operands with types",
        ]
    )
    with pytest.raises(TypeError, match=pattern):
        vector / tdser

    result = tdser / vector.astype(object)
    if box_with_array is DataFrame:
        expected = [tdser.iloc[0, n] / vector[n] for n in range(len(vector))]
        expected = tm.box_expected(expected, xbox).astype(object)
        # We specifically expect timedelta64("NaT") here, not pd.NA
        msg = "The 'downcast' keyword in fillna"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            expected[2] = expected[2].fillna(
                np.timedelta64("NaT", "ns"), downcast=False
            )
    else:
        expected = [tdser[n] / vector[n] for n in range(len(tdser))]
        expected = [
            x if x is not NaT else np.timedelta64("NaT", "ns") for x in expected
        ]
        if xbox is tm.to_array:
            expected = tm.to_array(expected).astype(object)
        else:
            expected = xbox(expected, dtype=object)

    tm.assert_equal(result, expected)

    with pytest.raises(TypeError, match=pattern):
        vector.astype(object) / tdser
+
def test_td64arr_mul_int_series(self, box_with_array, names):
    """GH#19042: name attachment when multiplying a td64 array by an int
    Series — the result carries the expected combined name."""
    # GH#19042 test for correct name attachment
    box = box_with_array
    exname = get_expected_name(box, names)

    tdi = TimedeltaIndex(
        ["0days", "1day", "2days", "3days", "4days"], name=names[0]
    )
    # TODO: Should we be parametrizing over types for `ser` too?
    ser = Series([0, 1, 2, 3, 4], dtype=np.int64, name=names[1])

    expected = Series(
        ["0days", "1day", "4days", "9days", "16days"],
        dtype="timedelta64[ns]",
        name=exname,
    )

    tdi = tm.box_expected(tdi, box)
    xbox = get_upcast_box(tdi, ser)

    expected = tm.box_expected(expected, xbox)

    result = ser * tdi
    tm.assert_equal(result, expected)

    result = tdi * ser
    tm.assert_equal(result, expected)
# TODO: Should we be parametrizing over types for `ser` too?
def test_float_series_rdiv_td64arr(self, box_with_array, names):
    """GH#19042: name attachment on Series.__rtruediv__ with a td64 array;
    DataFrame defers (returns NotImplemented)."""
    # GH#19042 test for correct name attachment
    box = box_with_array
    tdi = TimedeltaIndex(
        ["0days", "1day", "2days", "3days", "4days"], name=names[0]
    )
    ser = Series([1.5, 3, 4.5, 6, 7.5], dtype=np.float64, name=names[1])

    xname = names[2] if box not in [tm.to_array, pd.array] else names[1]
    expected = Series(
        [tdi[n] / ser[n] for n in range(len(ser))],
        dtype="timedelta64[ns]",
        name=xname,
    )

    tdi = tm.box_expected(tdi, box)
    xbox = get_upcast_box(tdi, ser)
    expected = tm.box_expected(expected, xbox)

    result = ser.__rtruediv__(tdi)
    if box is DataFrame:
        # DataFrame operands make Series.__rtruediv__ defer to the frame.
        assert result is NotImplemented
    else:
        tm.assert_equal(result, expected)
def test_td64arr_all_nat_div_object_dtype_numeric(self, box_with_array):
    """GH#39750: an all-NaT td64 array divided by object-dtype numerics is
    inferred back as timedelta64 (np.timedelta64('NaT') entries)."""
    # GH#39750 make sure we infer the result as td64
    tdi = TimedeltaIndex([NaT, NaT])

    left = tm.box_expected(tdi, box_with_array)
    right = np.array([2, 2.0], dtype=object)

    tdnat = np.timedelta64("NaT", "ns")
    expected = Index([tdnat] * 2, dtype=object)
    if box_with_array is not Index:
        expected = tm.box_expected(expected, box_with_array).astype(object)
        if box_with_array in [Series, DataFrame]:
            msg = "The 'downcast' keyword in fillna is deprecated"
            with tm.assert_produces_warning(FutureWarning, match=msg):
                expected = expected.fillna(tdnat, downcast=False)  # GH#18463

    result = left / right
    tm.assert_equal(result, expected)

    result = left // right
    tm.assert_equal(result, expected)
| 2143 |
+
class TestTimedelta64ArrayLikeArithmetic:
    # Arithmetic tests for timedelta64[ns] vectors fully parametrized over
    # DataFrame/Series/TimedeltaIndex/TimedeltaArray. Ideally all arithmetic
    # tests will eventually end up here.

    def test_td64arr_pow_invalid(self, scalar_td, box_with_array):
        """Exponentiation involving a td64 array is undefined and raises
        TypeError in both operand orders."""
        td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
        td1.iloc[2] = np.nan

        td1 = tm.box_expected(td1, box_with_array)

        # check that we are getting a TypeError
        # with 'operate' (from core/ops.py) for the ops that are not
        # defined
        pattern = "operate|unsupported|cannot|not supported"
        with pytest.raises(TypeError, match=pattern):
            scalar_td**td1

        with pytest.raises(TypeError, match=pattern):
            td1**scalar_td
| 2164 |
+
|
| 2165 |
+
def test_add_timestamp_to_timedelta():
    """GH#35897: Timestamp + TimedeltaIndex produces a DatetimeIndex."""
    start = Timestamp("2021-01-01")
    result = start + timedelta_range("0s", "1s", periods=31)

    step = pd.to_timedelta("0.033333333s")
    nano = pd.to_timedelta("0.000000001s")
    # Every third step accumulates one extra nanosecond of rounding,
    # matching timedelta_range's integer-nanosecond spacing.
    offsets = [step * i + nano * (i // 3) for i in range(31)]
    expected = DatetimeIndex([start + off for off in offsets])

    tm.assert_index_equal(result, expected)