Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- vllm/lib/python3.10/site-packages/pandas/core/__pycache__/frame.cpython-310.pyc +3 -0
- vllm/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc +3 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/__init__.py +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/conftest.py +48 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_arithmetic.py +244 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_astype.py +128 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_comparison.py +65 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_concat.py +20 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_construction.py +204 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_contains.py +12 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_repr.py +47 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_to_numpy.py +132 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/masked_shared.py +154 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_array.py +478 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_datetimelike.py +1344 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_datetimes.py +840 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_ndarray_backed.py +75 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_period.py +184 -0
- vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_timedeltas.py +313 -0
- vllm/lib/python3.10/site-packages/pandas/tests/config/__init__.py +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/config/__pycache__/__init__.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/config/__pycache__/test_config.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/config/__pycache__/test_localization.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/config/test_config.py +437 -0
- vllm/lib/python3.10/site-packages/pandas/tests/config/test_localization.py +156 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__init__.py +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/__init__.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_array.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_astype.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_chained_assignment_deprecation.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_clip.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_constructors.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_core_functionalities.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_functions.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_indexing.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_internals.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_interp_fillna.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_methods.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_replace.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_setitem.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_util.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/util.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__init__.py +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/__init__.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_datetimeindex.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_index.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_periodindex.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_timedeltaindex.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/test_datetimeindex.py +69 -0
.gitattributes
CHANGED
|
@@ -930,3 +930,5 @@ parrot/lib/python3.10/site-packages/matplotlib/backends/_backend_agg.cpython-310
|
|
| 930 |
parrot/lib/python3.10/site-packages/matplotlib/__pycache__/widgets.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 931 |
parrot/lib/python3.10/site-packages/scipy/sparse/linalg/_dsolve/_superlu.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 932 |
vllm/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/multi.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 930 |
parrot/lib/python3.10/site-packages/matplotlib/__pycache__/widgets.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 931 |
parrot/lib/python3.10/site-packages/scipy/sparse/linalg/_dsolve/_superlu.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 932 |
vllm/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/multi.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 933 |
+
vllm/lib/python3.10/site-packages/pandas/core/__pycache__/frame.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 934 |
+
vllm/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
vllm/lib/python3.10/site-packages/pandas/core/__pycache__/frame.cpython-310.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:cc5081d9dfb571bcab11356ae88f40264b728bd93d2b64d404d84fab3d2023fe
|
| 3 |
+
size 362630
|
vllm/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:19b591e5019fd65b233f0e5617069613a74cba18bff30f36e1282ca226f1e378
|
| 3 |
+
size 176120
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/__init__.py
ADDED
|
File without changes
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/conftest.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pandas.core.arrays.floating import (
|
| 6 |
+
Float32Dtype,
|
| 7 |
+
Float64Dtype,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@pytest.fixture(params=[Float32Dtype, Float64Dtype])
|
| 12 |
+
def dtype(request):
|
| 13 |
+
"""Parametrized fixture returning a float 'dtype'"""
|
| 14 |
+
return request.param()
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@pytest.fixture
|
| 18 |
+
def data(dtype):
|
| 19 |
+
"""Fixture returning 'data' array according to parametrized float 'dtype'"""
|
| 20 |
+
return pd.array(
|
| 21 |
+
list(np.arange(0.1, 0.9, 0.1))
|
| 22 |
+
+ [pd.NA]
|
| 23 |
+
+ list(np.arange(1, 9.8, 0.1))
|
| 24 |
+
+ [pd.NA]
|
| 25 |
+
+ [9.9, 10.0],
|
| 26 |
+
dtype=dtype,
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@pytest.fixture
|
| 31 |
+
def data_missing(dtype):
|
| 32 |
+
"""
|
| 33 |
+
Fixture returning array with missing data according to parametrized float
|
| 34 |
+
'dtype'.
|
| 35 |
+
"""
|
| 36 |
+
return pd.array([np.nan, 0.1], dtype=dtype)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@pytest.fixture(params=["data", "data_missing"])
|
| 40 |
+
def all_data(request, data, data_missing):
|
| 41 |
+
"""Parametrized fixture returning 'data' or 'data_missing' float arrays.
|
| 42 |
+
|
| 43 |
+
Used to test dtype conversion with and without missing values.
|
| 44 |
+
"""
|
| 45 |
+
if request.param == "data":
|
| 46 |
+
return data
|
| 47 |
+
elif request.param == "data_missing":
|
| 48 |
+
return data_missing
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_arithmetic.py
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import operator
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
import pandas as pd
|
| 7 |
+
import pandas._testing as tm
|
| 8 |
+
from pandas.core.arrays import FloatingArray
|
| 9 |
+
|
| 10 |
+
# Basic test for the arithmetic array ops
|
| 11 |
+
# -----------------------------------------------------------------------------
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@pytest.mark.parametrize(
|
| 15 |
+
"opname, exp",
|
| 16 |
+
[
|
| 17 |
+
("add", [1.1, 2.2, None, None, 5.5]),
|
| 18 |
+
("mul", [0.1, 0.4, None, None, 2.5]),
|
| 19 |
+
("sub", [0.9, 1.8, None, None, 4.5]),
|
| 20 |
+
("truediv", [10.0, 10.0, None, None, 10.0]),
|
| 21 |
+
("floordiv", [9.0, 9.0, None, None, 10.0]),
|
| 22 |
+
("mod", [0.1, 0.2, None, None, 0.0]),
|
| 23 |
+
],
|
| 24 |
+
ids=["add", "mul", "sub", "div", "floordiv", "mod"],
|
| 25 |
+
)
|
| 26 |
+
def test_array_op(dtype, opname, exp):
|
| 27 |
+
a = pd.array([1.0, 2.0, None, 4.0, 5.0], dtype=dtype)
|
| 28 |
+
b = pd.array([0.1, 0.2, 0.3, None, 0.5], dtype=dtype)
|
| 29 |
+
|
| 30 |
+
op = getattr(operator, opname)
|
| 31 |
+
|
| 32 |
+
result = op(a, b)
|
| 33 |
+
expected = pd.array(exp, dtype=dtype)
|
| 34 |
+
tm.assert_extension_array_equal(result, expected)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@pytest.mark.parametrize("zero, negative", [(0, False), (0.0, False), (-0.0, True)])
|
| 38 |
+
def test_divide_by_zero(dtype, zero, negative):
|
| 39 |
+
# TODO pending NA/NaN discussion
|
| 40 |
+
# https://github.com/pandas-dev/pandas/issues/32265/
|
| 41 |
+
a = pd.array([0, 1, -1, None], dtype=dtype)
|
| 42 |
+
result = a / zero
|
| 43 |
+
expected = FloatingArray(
|
| 44 |
+
np.array([np.nan, np.inf, -np.inf, np.nan], dtype=dtype.numpy_dtype),
|
| 45 |
+
np.array([False, False, False, True]),
|
| 46 |
+
)
|
| 47 |
+
if negative:
|
| 48 |
+
expected *= -1
|
| 49 |
+
tm.assert_extension_array_equal(result, expected)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def test_pow_scalar(dtype):
|
| 53 |
+
a = pd.array([-1, 0, 1, None, 2], dtype=dtype)
|
| 54 |
+
result = a**0
|
| 55 |
+
expected = pd.array([1, 1, 1, 1, 1], dtype=dtype)
|
| 56 |
+
tm.assert_extension_array_equal(result, expected)
|
| 57 |
+
|
| 58 |
+
result = a**1
|
| 59 |
+
expected = pd.array([-1, 0, 1, None, 2], dtype=dtype)
|
| 60 |
+
tm.assert_extension_array_equal(result, expected)
|
| 61 |
+
|
| 62 |
+
result = a**pd.NA
|
| 63 |
+
expected = pd.array([None, None, 1, None, None], dtype=dtype)
|
| 64 |
+
tm.assert_extension_array_equal(result, expected)
|
| 65 |
+
|
| 66 |
+
result = a**np.nan
|
| 67 |
+
# TODO np.nan should be converted to pd.NA / missing before operation?
|
| 68 |
+
expected = FloatingArray(
|
| 69 |
+
np.array([np.nan, np.nan, 1, np.nan, np.nan], dtype=dtype.numpy_dtype),
|
| 70 |
+
mask=a._mask,
|
| 71 |
+
)
|
| 72 |
+
tm.assert_extension_array_equal(result, expected)
|
| 73 |
+
|
| 74 |
+
# reversed
|
| 75 |
+
a = a[1:] # Can't raise integers to negative powers.
|
| 76 |
+
|
| 77 |
+
result = 0**a
|
| 78 |
+
expected = pd.array([1, 0, None, 0], dtype=dtype)
|
| 79 |
+
tm.assert_extension_array_equal(result, expected)
|
| 80 |
+
|
| 81 |
+
result = 1**a
|
| 82 |
+
expected = pd.array([1, 1, 1, 1], dtype=dtype)
|
| 83 |
+
tm.assert_extension_array_equal(result, expected)
|
| 84 |
+
|
| 85 |
+
result = pd.NA**a
|
| 86 |
+
expected = pd.array([1, None, None, None], dtype=dtype)
|
| 87 |
+
tm.assert_extension_array_equal(result, expected)
|
| 88 |
+
|
| 89 |
+
result = np.nan**a
|
| 90 |
+
expected = FloatingArray(
|
| 91 |
+
np.array([1, np.nan, np.nan, np.nan], dtype=dtype.numpy_dtype), mask=a._mask
|
| 92 |
+
)
|
| 93 |
+
tm.assert_extension_array_equal(result, expected)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def test_pow_array(dtype):
|
| 97 |
+
a = pd.array([0, 0, 0, 1, 1, 1, None, None, None], dtype=dtype)
|
| 98 |
+
b = pd.array([0, 1, None, 0, 1, None, 0, 1, None], dtype=dtype)
|
| 99 |
+
result = a**b
|
| 100 |
+
expected = pd.array([1, 0, None, 1, 1, 1, 1, None, None], dtype=dtype)
|
| 101 |
+
tm.assert_extension_array_equal(result, expected)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def test_rpow_one_to_na():
|
| 105 |
+
# https://github.com/pandas-dev/pandas/issues/22022
|
| 106 |
+
# https://github.com/pandas-dev/pandas/issues/29997
|
| 107 |
+
arr = pd.array([np.nan, np.nan], dtype="Float64")
|
| 108 |
+
result = np.array([1.0, 2.0]) ** arr
|
| 109 |
+
expected = pd.array([1.0, np.nan], dtype="Float64")
|
| 110 |
+
tm.assert_extension_array_equal(result, expected)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
@pytest.mark.parametrize("other", [0, 0.5])
|
| 114 |
+
def test_arith_zero_dim_ndarray(other):
|
| 115 |
+
arr = pd.array([1, None, 2], dtype="Float64")
|
| 116 |
+
result = arr + np.array(other)
|
| 117 |
+
expected = arr + other
|
| 118 |
+
tm.assert_equal(result, expected)
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
# Test generic characteristics / errors
|
| 122 |
+
# -----------------------------------------------------------------------------
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def test_error_invalid_values(data, all_arithmetic_operators, using_infer_string):
|
| 126 |
+
op = all_arithmetic_operators
|
| 127 |
+
s = pd.Series(data)
|
| 128 |
+
ops = getattr(s, op)
|
| 129 |
+
|
| 130 |
+
if using_infer_string:
|
| 131 |
+
import pyarrow as pa
|
| 132 |
+
|
| 133 |
+
errs = (TypeError, pa.lib.ArrowNotImplementedError, NotImplementedError)
|
| 134 |
+
else:
|
| 135 |
+
errs = TypeError
|
| 136 |
+
|
| 137 |
+
# invalid scalars
|
| 138 |
+
msg = "|".join(
|
| 139 |
+
[
|
| 140 |
+
r"can only perform ops with numeric values",
|
| 141 |
+
r"FloatingArray cannot perform the operation mod",
|
| 142 |
+
"unsupported operand type",
|
| 143 |
+
"not all arguments converted during string formatting",
|
| 144 |
+
"can't multiply sequence by non-int of type 'float'",
|
| 145 |
+
"ufunc 'subtract' cannot use operands with types dtype",
|
| 146 |
+
r"can only concatenate str \(not \"float\"\) to str",
|
| 147 |
+
"ufunc '.*' not supported for the input types, and the inputs could not",
|
| 148 |
+
"ufunc '.*' did not contain a loop with signature matching types",
|
| 149 |
+
"Concatenation operation is not implemented for NumPy arrays",
|
| 150 |
+
"has no kernel",
|
| 151 |
+
"not implemented",
|
| 152 |
+
]
|
| 153 |
+
)
|
| 154 |
+
with pytest.raises(errs, match=msg):
|
| 155 |
+
ops("foo")
|
| 156 |
+
with pytest.raises(errs, match=msg):
|
| 157 |
+
ops(pd.Timestamp("20180101"))
|
| 158 |
+
|
| 159 |
+
# invalid array-likes
|
| 160 |
+
with pytest.raises(errs, match=msg):
|
| 161 |
+
ops(pd.Series("foo", index=s.index))
|
| 162 |
+
|
| 163 |
+
msg = "|".join(
|
| 164 |
+
[
|
| 165 |
+
"can only perform ops with numeric values",
|
| 166 |
+
"cannot perform .* with this index type: DatetimeArray",
|
| 167 |
+
"Addition/subtraction of integers and integer-arrays "
|
| 168 |
+
"with DatetimeArray is no longer supported. *",
|
| 169 |
+
"unsupported operand type",
|
| 170 |
+
"not all arguments converted during string formatting",
|
| 171 |
+
"can't multiply sequence by non-int of type 'float'",
|
| 172 |
+
"ufunc 'subtract' cannot use operands with types dtype",
|
| 173 |
+
(
|
| 174 |
+
"ufunc 'add' cannot use operands with types "
|
| 175 |
+
rf"dtype\('{tm.ENDIAN}M8\[ns\]'\)"
|
| 176 |
+
),
|
| 177 |
+
r"ufunc 'add' cannot use operands with types dtype\('float\d{2}'\)",
|
| 178 |
+
"cannot subtract DatetimeArray from ndarray",
|
| 179 |
+
"has no kernel",
|
| 180 |
+
"not implemented",
|
| 181 |
+
]
|
| 182 |
+
)
|
| 183 |
+
with pytest.raises(errs, match=msg):
|
| 184 |
+
ops(pd.Series(pd.date_range("20180101", periods=len(s))))
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
# Various
|
| 188 |
+
# -----------------------------------------------------------------------------
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def test_cross_type_arithmetic():
|
| 192 |
+
df = pd.DataFrame(
|
| 193 |
+
{
|
| 194 |
+
"A": pd.array([1, 2, np.nan], dtype="Float64"),
|
| 195 |
+
"B": pd.array([1, np.nan, 3], dtype="Float32"),
|
| 196 |
+
"C": np.array([1, 2, 3], dtype="float64"),
|
| 197 |
+
}
|
| 198 |
+
)
|
| 199 |
+
|
| 200 |
+
result = df.A + df.C
|
| 201 |
+
expected = pd.Series([2, 4, np.nan], dtype="Float64")
|
| 202 |
+
tm.assert_series_equal(result, expected)
|
| 203 |
+
|
| 204 |
+
result = (df.A + df.C) * 3 == 12
|
| 205 |
+
expected = pd.Series([False, True, None], dtype="boolean")
|
| 206 |
+
tm.assert_series_equal(result, expected)
|
| 207 |
+
|
| 208 |
+
result = df.A + df.B
|
| 209 |
+
expected = pd.Series([2, np.nan, np.nan], dtype="Float64")
|
| 210 |
+
tm.assert_series_equal(result, expected)
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
@pytest.mark.parametrize(
|
| 214 |
+
"source, neg_target, abs_target",
|
| 215 |
+
[
|
| 216 |
+
([1.1, 2.2, 3.3], [-1.1, -2.2, -3.3], [1.1, 2.2, 3.3]),
|
| 217 |
+
([1.1, 2.2, None], [-1.1, -2.2, None], [1.1, 2.2, None]),
|
| 218 |
+
([-1.1, 0.0, 1.1], [1.1, 0.0, -1.1], [1.1, 0.0, 1.1]),
|
| 219 |
+
],
|
| 220 |
+
)
|
| 221 |
+
def test_unary_float_operators(float_ea_dtype, source, neg_target, abs_target):
|
| 222 |
+
# GH38794
|
| 223 |
+
dtype = float_ea_dtype
|
| 224 |
+
arr = pd.array(source, dtype=dtype)
|
| 225 |
+
neg_result, pos_result, abs_result = -arr, +arr, abs(arr)
|
| 226 |
+
neg_target = pd.array(neg_target, dtype=dtype)
|
| 227 |
+
abs_target = pd.array(abs_target, dtype=dtype)
|
| 228 |
+
|
| 229 |
+
tm.assert_extension_array_equal(neg_result, neg_target)
|
| 230 |
+
tm.assert_extension_array_equal(pos_result, arr)
|
| 231 |
+
assert not tm.shares_memory(pos_result, arr)
|
| 232 |
+
tm.assert_extension_array_equal(abs_result, abs_target)
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def test_bitwise(dtype):
|
| 236 |
+
left = pd.array([1, None, 3, 4], dtype=dtype)
|
| 237 |
+
right = pd.array([None, 3, 5, 4], dtype=dtype)
|
| 238 |
+
|
| 239 |
+
with pytest.raises(TypeError, match="unsupported operand type"):
|
| 240 |
+
left | right
|
| 241 |
+
with pytest.raises(TypeError, match="unsupported operand type"):
|
| 242 |
+
left & right
|
| 243 |
+
with pytest.raises(TypeError, match="unsupported operand type"):
|
| 244 |
+
left ^ right
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_astype.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import pandas._testing as tm
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_astype():
|
| 9 |
+
# with missing values
|
| 10 |
+
arr = pd.array([0.1, 0.2, None], dtype="Float64")
|
| 11 |
+
|
| 12 |
+
with pytest.raises(ValueError, match="cannot convert NA to integer"):
|
| 13 |
+
arr.astype("int64")
|
| 14 |
+
|
| 15 |
+
with pytest.raises(ValueError, match="cannot convert float NaN to bool"):
|
| 16 |
+
arr.astype("bool")
|
| 17 |
+
|
| 18 |
+
result = arr.astype("float64")
|
| 19 |
+
expected = np.array([0.1, 0.2, np.nan], dtype="float64")
|
| 20 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 21 |
+
|
| 22 |
+
# no missing values
|
| 23 |
+
arr = pd.array([0.0, 1.0, 0.5], dtype="Float64")
|
| 24 |
+
result = arr.astype("int64")
|
| 25 |
+
expected = np.array([0, 1, 0], dtype="int64")
|
| 26 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 27 |
+
|
| 28 |
+
result = arr.astype("bool")
|
| 29 |
+
expected = np.array([False, True, True], dtype="bool")
|
| 30 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def test_astype_to_floating_array():
|
| 34 |
+
# astype to FloatingArray
|
| 35 |
+
arr = pd.array([0.0, 1.0, None], dtype="Float64")
|
| 36 |
+
|
| 37 |
+
result = arr.astype("Float64")
|
| 38 |
+
tm.assert_extension_array_equal(result, arr)
|
| 39 |
+
result = arr.astype(pd.Float64Dtype())
|
| 40 |
+
tm.assert_extension_array_equal(result, arr)
|
| 41 |
+
result = arr.astype("Float32")
|
| 42 |
+
expected = pd.array([0.0, 1.0, None], dtype="Float32")
|
| 43 |
+
tm.assert_extension_array_equal(result, expected)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def test_astype_to_boolean_array():
|
| 47 |
+
# astype to BooleanArray
|
| 48 |
+
arr = pd.array([0.0, 1.0, None], dtype="Float64")
|
| 49 |
+
|
| 50 |
+
result = arr.astype("boolean")
|
| 51 |
+
expected = pd.array([False, True, None], dtype="boolean")
|
| 52 |
+
tm.assert_extension_array_equal(result, expected)
|
| 53 |
+
result = arr.astype(pd.BooleanDtype())
|
| 54 |
+
tm.assert_extension_array_equal(result, expected)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def test_astype_to_integer_array():
|
| 58 |
+
# astype to IntegerArray
|
| 59 |
+
arr = pd.array([0.0, 1.5, None], dtype="Float64")
|
| 60 |
+
|
| 61 |
+
result = arr.astype("Int64")
|
| 62 |
+
expected = pd.array([0, 1, None], dtype="Int64")
|
| 63 |
+
tm.assert_extension_array_equal(result, expected)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def test_astype_str():
|
| 67 |
+
a = pd.array([0.1, 0.2, None], dtype="Float64")
|
| 68 |
+
expected = np.array(["0.1", "0.2", "<NA>"], dtype="U32")
|
| 69 |
+
|
| 70 |
+
tm.assert_numpy_array_equal(a.astype(str), expected)
|
| 71 |
+
tm.assert_numpy_array_equal(a.astype("str"), expected)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def test_astype_copy():
|
| 75 |
+
arr = pd.array([0.1, 0.2, None], dtype="Float64")
|
| 76 |
+
orig = pd.array([0.1, 0.2, None], dtype="Float64")
|
| 77 |
+
|
| 78 |
+
# copy=True -> ensure both data and mask are actual copies
|
| 79 |
+
result = arr.astype("Float64", copy=True)
|
| 80 |
+
assert result is not arr
|
| 81 |
+
assert not tm.shares_memory(result, arr)
|
| 82 |
+
result[0] = 10
|
| 83 |
+
tm.assert_extension_array_equal(arr, orig)
|
| 84 |
+
result[0] = pd.NA
|
| 85 |
+
tm.assert_extension_array_equal(arr, orig)
|
| 86 |
+
|
| 87 |
+
# copy=False
|
| 88 |
+
result = arr.astype("Float64", copy=False)
|
| 89 |
+
assert result is arr
|
| 90 |
+
assert np.shares_memory(result._data, arr._data)
|
| 91 |
+
assert np.shares_memory(result._mask, arr._mask)
|
| 92 |
+
result[0] = 10
|
| 93 |
+
assert arr[0] == 10
|
| 94 |
+
result[0] = pd.NA
|
| 95 |
+
assert arr[0] is pd.NA
|
| 96 |
+
|
| 97 |
+
# astype to different dtype -> always needs a copy -> even with copy=False
|
| 98 |
+
# we need to ensure that also the mask is actually copied
|
| 99 |
+
arr = pd.array([0.1, 0.2, None], dtype="Float64")
|
| 100 |
+
orig = pd.array([0.1, 0.2, None], dtype="Float64")
|
| 101 |
+
|
| 102 |
+
result = arr.astype("Float32", copy=False)
|
| 103 |
+
assert not tm.shares_memory(result, arr)
|
| 104 |
+
result[0] = 10
|
| 105 |
+
tm.assert_extension_array_equal(arr, orig)
|
| 106 |
+
result[0] = pd.NA
|
| 107 |
+
tm.assert_extension_array_equal(arr, orig)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def test_astype_object(dtype):
|
| 111 |
+
arr = pd.array([1.0, pd.NA], dtype=dtype)
|
| 112 |
+
|
| 113 |
+
result = arr.astype(object)
|
| 114 |
+
expected = np.array([1.0, pd.NA], dtype=object)
|
| 115 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 116 |
+
# check exact element types
|
| 117 |
+
assert isinstance(result[0], float)
|
| 118 |
+
assert result[1] is pd.NA
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def test_Float64_conversion():
|
| 122 |
+
# GH#40729
|
| 123 |
+
testseries = pd.Series(["1", "2", "3", "4"], dtype="object")
|
| 124 |
+
result = testseries.astype(pd.Float64Dtype())
|
| 125 |
+
|
| 126 |
+
expected = pd.Series([1.0, 2.0, 3.0, 4.0], dtype=pd.Float64Dtype())
|
| 127 |
+
|
| 128 |
+
tm.assert_series_equal(result, expected)
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_comparison.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import pandas._testing as tm
|
| 6 |
+
from pandas.core.arrays import FloatingArray
|
| 7 |
+
from pandas.tests.arrays.masked_shared import (
|
| 8 |
+
ComparisonOps,
|
| 9 |
+
NumericOps,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class TestComparisonOps(NumericOps, ComparisonOps):
|
| 14 |
+
@pytest.mark.parametrize("other", [True, False, pd.NA, -1.0, 0.0, 1])
|
| 15 |
+
def test_scalar(self, other, comparison_op, dtype):
|
| 16 |
+
ComparisonOps.test_scalar(self, other, comparison_op, dtype)
|
| 17 |
+
|
| 18 |
+
def test_compare_with_integerarray(self, comparison_op):
|
| 19 |
+
op = comparison_op
|
| 20 |
+
a = pd.array([0, 1, None] * 3, dtype="Int64")
|
| 21 |
+
b = pd.array([0] * 3 + [1] * 3 + [None] * 3, dtype="Float64")
|
| 22 |
+
other = b.astype("Int64")
|
| 23 |
+
expected = op(a, other)
|
| 24 |
+
result = op(a, b)
|
| 25 |
+
tm.assert_extension_array_equal(result, expected)
|
| 26 |
+
expected = op(other, a)
|
| 27 |
+
result = op(b, a)
|
| 28 |
+
tm.assert_extension_array_equal(result, expected)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def test_equals():
|
| 32 |
+
# GH-30652
|
| 33 |
+
# equals is generally tested in /tests/extension/base/methods, but this
|
| 34 |
+
# specifically tests that two arrays of the same class but different dtype
|
| 35 |
+
# do not evaluate equal
|
| 36 |
+
a1 = pd.array([1, 2, None], dtype="Float64")
|
| 37 |
+
a2 = pd.array([1, 2, None], dtype="Float32")
|
| 38 |
+
assert a1.equals(a2) is False
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def test_equals_nan_vs_na():
|
| 42 |
+
# GH#44382
|
| 43 |
+
|
| 44 |
+
mask = np.zeros(3, dtype=bool)
|
| 45 |
+
data = np.array([1.0, np.nan, 3.0], dtype=np.float64)
|
| 46 |
+
|
| 47 |
+
left = FloatingArray(data, mask)
|
| 48 |
+
assert left.equals(left)
|
| 49 |
+
tm.assert_extension_array_equal(left, left)
|
| 50 |
+
|
| 51 |
+
assert left.equals(left.copy())
|
| 52 |
+
assert left.equals(FloatingArray(data.copy(), mask.copy()))
|
| 53 |
+
|
| 54 |
+
mask2 = np.array([False, True, False], dtype=bool)
|
| 55 |
+
data2 = np.array([1.0, 2.0, 3.0], dtype=np.float64)
|
| 56 |
+
right = FloatingArray(data2, mask2)
|
| 57 |
+
assert right.equals(right)
|
| 58 |
+
tm.assert_extension_array_equal(right, right)
|
| 59 |
+
|
| 60 |
+
assert not left.equals(right)
|
| 61 |
+
|
| 62 |
+
# with mask[1] = True, the only difference is data[1], which should
|
| 63 |
+
# not matter for equals
|
| 64 |
+
mask[1] = True
|
| 65 |
+
assert left.equals(right)
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_concat.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import pandas as pd
|
| 4 |
+
import pandas._testing as tm
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
@pytest.mark.parametrize(
|
| 8 |
+
"to_concat_dtypes, result_dtype",
|
| 9 |
+
[
|
| 10 |
+
(["Float64", "Float64"], "Float64"),
|
| 11 |
+
(["Float32", "Float64"], "Float64"),
|
| 12 |
+
(["Float32", "Float32"], "Float32"),
|
| 13 |
+
],
|
| 14 |
+
)
|
| 15 |
+
def test_concat_series(to_concat_dtypes, result_dtype):
|
| 16 |
+
result = pd.concat([pd.Series([1, 2, pd.NA], dtype=t) for t in to_concat_dtypes])
|
| 17 |
+
expected = pd.concat([pd.Series([1, 2, pd.NA], dtype=object)] * 2).astype(
|
| 18 |
+
result_dtype
|
| 19 |
+
)
|
| 20 |
+
tm.assert_series_equal(result, expected)
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_construction.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import pandas._testing as tm
|
| 6 |
+
from pandas.core.arrays import FloatingArray
|
| 7 |
+
from pandas.core.arrays.floating import (
|
| 8 |
+
Float32Dtype,
|
| 9 |
+
Float64Dtype,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def test_uses_pandas_na():
|
| 14 |
+
a = pd.array([1, None], dtype=Float64Dtype())
|
| 15 |
+
assert a[1] is pd.NA
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def test_floating_array_constructor():
|
| 19 |
+
values = np.array([1, 2, 3, 4], dtype="float64")
|
| 20 |
+
mask = np.array([False, False, False, True], dtype="bool")
|
| 21 |
+
|
| 22 |
+
result = FloatingArray(values, mask)
|
| 23 |
+
expected = pd.array([1, 2, 3, np.nan], dtype="Float64")
|
| 24 |
+
tm.assert_extension_array_equal(result, expected)
|
| 25 |
+
tm.assert_numpy_array_equal(result._data, values)
|
| 26 |
+
tm.assert_numpy_array_equal(result._mask, mask)
|
| 27 |
+
|
| 28 |
+
msg = r".* should be .* numpy array. Use the 'pd.array' function instead"
|
| 29 |
+
with pytest.raises(TypeError, match=msg):
|
| 30 |
+
FloatingArray(values.tolist(), mask)
|
| 31 |
+
|
| 32 |
+
with pytest.raises(TypeError, match=msg):
|
| 33 |
+
FloatingArray(values, mask.tolist())
|
| 34 |
+
|
| 35 |
+
with pytest.raises(TypeError, match=msg):
|
| 36 |
+
FloatingArray(values.astype(int), mask)
|
| 37 |
+
|
| 38 |
+
msg = r"__init__\(\) missing 1 required positional argument: 'mask'"
|
| 39 |
+
with pytest.raises(TypeError, match=msg):
|
| 40 |
+
FloatingArray(values)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def test_floating_array_disallows_float16():
|
| 44 |
+
# GH#44715
|
| 45 |
+
arr = np.array([1, 2], dtype=np.float16)
|
| 46 |
+
mask = np.array([False, False])
|
| 47 |
+
|
| 48 |
+
msg = "FloatingArray does not support np.float16 dtype"
|
| 49 |
+
with pytest.raises(TypeError, match=msg):
|
| 50 |
+
FloatingArray(arr, mask)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def test_floating_array_disallows_Float16_dtype(request):
|
| 54 |
+
# GH#44715
|
| 55 |
+
with pytest.raises(TypeError, match="data type 'Float16' not understood"):
|
| 56 |
+
pd.array([1.0, 2.0], dtype="Float16")
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def test_floating_array_constructor_copy():
|
| 60 |
+
values = np.array([1, 2, 3, 4], dtype="float64")
|
| 61 |
+
mask = np.array([False, False, False, True], dtype="bool")
|
| 62 |
+
|
| 63 |
+
result = FloatingArray(values, mask)
|
| 64 |
+
assert result._data is values
|
| 65 |
+
assert result._mask is mask
|
| 66 |
+
|
| 67 |
+
result = FloatingArray(values, mask, copy=True)
|
| 68 |
+
assert result._data is not values
|
| 69 |
+
assert result._mask is not mask
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def test_to_array():
|
| 73 |
+
result = pd.array([0.1, 0.2, 0.3, 0.4])
|
| 74 |
+
expected = pd.array([0.1, 0.2, 0.3, 0.4], dtype="Float64")
|
| 75 |
+
tm.assert_extension_array_equal(result, expected)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
@pytest.mark.parametrize(
|
| 79 |
+
"a, b",
|
| 80 |
+
[
|
| 81 |
+
([1, None], [1, pd.NA]),
|
| 82 |
+
([None], [pd.NA]),
|
| 83 |
+
([None, np.nan], [pd.NA, pd.NA]),
|
| 84 |
+
([1, np.nan], [1, pd.NA]),
|
| 85 |
+
([np.nan], [pd.NA]),
|
| 86 |
+
],
|
| 87 |
+
)
|
| 88 |
+
def test_to_array_none_is_nan(a, b):
|
| 89 |
+
result = pd.array(a, dtype="Float64")
|
| 90 |
+
expected = pd.array(b, dtype="Float64")
|
| 91 |
+
tm.assert_extension_array_equal(result, expected)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def test_to_array_mixed_integer_float():
|
| 95 |
+
result = pd.array([1, 2.0])
|
| 96 |
+
expected = pd.array([1.0, 2.0], dtype="Float64")
|
| 97 |
+
tm.assert_extension_array_equal(result, expected)
|
| 98 |
+
|
| 99 |
+
result = pd.array([1, None, 2.0])
|
| 100 |
+
expected = pd.array([1.0, None, 2.0], dtype="Float64")
|
| 101 |
+
tm.assert_extension_array_equal(result, expected)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@pytest.mark.parametrize(
|
| 105 |
+
"values",
|
| 106 |
+
[
|
| 107 |
+
["foo", "bar"],
|
| 108 |
+
"foo",
|
| 109 |
+
1,
|
| 110 |
+
1.0,
|
| 111 |
+
pd.date_range("20130101", periods=2),
|
| 112 |
+
np.array(["foo"]),
|
| 113 |
+
[[1, 2], [3, 4]],
|
| 114 |
+
[np.nan, {"a": 1}],
|
| 115 |
+
# GH#44514 all-NA case used to get quietly swapped out before checking ndim
|
| 116 |
+
np.array([pd.NA] * 6, dtype=object).reshape(3, 2),
|
| 117 |
+
],
|
| 118 |
+
)
|
| 119 |
+
def test_to_array_error(values):
|
| 120 |
+
# error in converting existing arrays to FloatingArray
|
| 121 |
+
msg = "|".join(
|
| 122 |
+
[
|
| 123 |
+
"cannot be converted to FloatingDtype",
|
| 124 |
+
"values must be a 1D list-like",
|
| 125 |
+
"Cannot pass scalar",
|
| 126 |
+
r"float\(\) argument must be a string or a (real )?number, not 'dict'",
|
| 127 |
+
"could not convert string to float: 'foo'",
|
| 128 |
+
r"could not convert string to float: np\.str_\('foo'\)",
|
| 129 |
+
]
|
| 130 |
+
)
|
| 131 |
+
with pytest.raises((TypeError, ValueError), match=msg):
|
| 132 |
+
pd.array(values, dtype="Float64")
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
@pytest.mark.parametrize("values", [["1", "2", None], ["1.5", "2", None]])
|
| 136 |
+
def test_construct_from_float_strings(values):
|
| 137 |
+
# see also test_to_integer_array_str
|
| 138 |
+
expected = pd.array([float(values[0]), 2, None], dtype="Float64")
|
| 139 |
+
|
| 140 |
+
res = pd.array(values, dtype="Float64")
|
| 141 |
+
tm.assert_extension_array_equal(res, expected)
|
| 142 |
+
|
| 143 |
+
res = FloatingArray._from_sequence(values)
|
| 144 |
+
tm.assert_extension_array_equal(res, expected)
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def test_to_array_inferred_dtype():
|
| 148 |
+
# if values has dtype -> respect it
|
| 149 |
+
result = pd.array(np.array([1, 2], dtype="float32"))
|
| 150 |
+
assert result.dtype == Float32Dtype()
|
| 151 |
+
|
| 152 |
+
# if values have no dtype -> always float64
|
| 153 |
+
result = pd.array([1.0, 2.0])
|
| 154 |
+
assert result.dtype == Float64Dtype()
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def test_to_array_dtype_keyword():
|
| 158 |
+
result = pd.array([1, 2], dtype="Float32")
|
| 159 |
+
assert result.dtype == Float32Dtype()
|
| 160 |
+
|
| 161 |
+
# if values has dtype -> override it
|
| 162 |
+
result = pd.array(np.array([1, 2], dtype="float32"), dtype="Float64")
|
| 163 |
+
assert result.dtype == Float64Dtype()
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def test_to_array_integer():
|
| 167 |
+
result = pd.array([1, 2], dtype="Float64")
|
| 168 |
+
expected = pd.array([1.0, 2.0], dtype="Float64")
|
| 169 |
+
tm.assert_extension_array_equal(result, expected)
|
| 170 |
+
|
| 171 |
+
# for integer dtypes, the itemsize is not preserved
|
| 172 |
+
# TODO can we specify "floating" in general?
|
| 173 |
+
result = pd.array(np.array([1, 2], dtype="int32"), dtype="Float64")
|
| 174 |
+
assert result.dtype == Float64Dtype()
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
@pytest.mark.parametrize(
|
| 178 |
+
"bool_values, values, target_dtype, expected_dtype",
|
| 179 |
+
[
|
| 180 |
+
([False, True], [0, 1], Float64Dtype(), Float64Dtype()),
|
| 181 |
+
([False, True], [0, 1], "Float64", Float64Dtype()),
|
| 182 |
+
([False, True, np.nan], [0, 1, np.nan], Float64Dtype(), Float64Dtype()),
|
| 183 |
+
],
|
| 184 |
+
)
|
| 185 |
+
def test_to_array_bool(bool_values, values, target_dtype, expected_dtype):
|
| 186 |
+
result = pd.array(bool_values, dtype=target_dtype)
|
| 187 |
+
assert result.dtype == expected_dtype
|
| 188 |
+
expected = pd.array(values, dtype=target_dtype)
|
| 189 |
+
tm.assert_extension_array_equal(result, expected)
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
def test_series_from_float(data):
|
| 193 |
+
# construct from our dtype & string dtype
|
| 194 |
+
dtype = data.dtype
|
| 195 |
+
|
| 196 |
+
# from float
|
| 197 |
+
expected = pd.Series(data)
|
| 198 |
+
result = pd.Series(data.to_numpy(na_value=np.nan, dtype="float"), dtype=str(dtype))
|
| 199 |
+
tm.assert_series_equal(result, expected)
|
| 200 |
+
|
| 201 |
+
# from list
|
| 202 |
+
expected = pd.Series(data)
|
| 203 |
+
result = pd.Series(np.array(data).tolist(), dtype=str(dtype))
|
| 204 |
+
tm.assert_series_equal(result, expected)
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_contains.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
|
| 3 |
+
import pandas as pd
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_contains_nan():
|
| 7 |
+
# GH#52840
|
| 8 |
+
arr = pd.array(range(5)) / 0
|
| 9 |
+
|
| 10 |
+
assert np.isnan(arr._data[0])
|
| 11 |
+
assert not arr.isna()[0]
|
| 12 |
+
assert np.nan in arr
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_repr.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pandas.core.arrays.floating import (
|
| 6 |
+
Float32Dtype,
|
| 7 |
+
Float64Dtype,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def test_dtypes(dtype):
|
| 12 |
+
# smoke tests on auto dtype construction
|
| 13 |
+
|
| 14 |
+
np.dtype(dtype.type).kind == "f"
|
| 15 |
+
assert dtype.name is not None
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@pytest.mark.parametrize(
|
| 19 |
+
"dtype, expected",
|
| 20 |
+
[(Float32Dtype(), "Float32Dtype()"), (Float64Dtype(), "Float64Dtype()")],
|
| 21 |
+
)
|
| 22 |
+
def test_repr_dtype(dtype, expected):
|
| 23 |
+
assert repr(dtype) == expected
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def test_repr_array():
|
| 27 |
+
result = repr(pd.array([1.0, None, 3.0]))
|
| 28 |
+
expected = "<FloatingArray>\n[1.0, <NA>, 3.0]\nLength: 3, dtype: Float64"
|
| 29 |
+
assert result == expected
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def test_repr_array_long():
|
| 33 |
+
data = pd.array([1.0, 2.0, None] * 1000)
|
| 34 |
+
expected = """<FloatingArray>
|
| 35 |
+
[ 1.0, 2.0, <NA>, 1.0, 2.0, <NA>, 1.0, 2.0, <NA>, 1.0,
|
| 36 |
+
...
|
| 37 |
+
<NA>, 1.0, 2.0, <NA>, 1.0, 2.0, <NA>, 1.0, 2.0, <NA>]
|
| 38 |
+
Length: 3000, dtype: Float64"""
|
| 39 |
+
result = repr(data)
|
| 40 |
+
assert result == expected
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def test_frame_repr(data_missing):
|
| 44 |
+
df = pd.DataFrame({"A": data_missing})
|
| 45 |
+
result = repr(df)
|
| 46 |
+
expected = " A\n0 <NA>\n1 0.1"
|
| 47 |
+
assert result == expected
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/floating/test_to_numpy.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import pandas._testing as tm
|
| 6 |
+
from pandas.core.arrays import FloatingArray
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
|
| 10 |
+
def test_to_numpy(box):
|
| 11 |
+
con = pd.Series if box else pd.array
|
| 12 |
+
|
| 13 |
+
# default (with or without missing values) -> object dtype
|
| 14 |
+
arr = con([0.1, 0.2, 0.3], dtype="Float64")
|
| 15 |
+
result = arr.to_numpy()
|
| 16 |
+
expected = np.array([0.1, 0.2, 0.3], dtype="float64")
|
| 17 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 18 |
+
|
| 19 |
+
arr = con([0.1, 0.2, None], dtype="Float64")
|
| 20 |
+
result = arr.to_numpy()
|
| 21 |
+
expected = np.array([0.1, 0.2, np.nan], dtype="float64")
|
| 22 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
|
| 26 |
+
def test_to_numpy_float(box):
|
| 27 |
+
con = pd.Series if box else pd.array
|
| 28 |
+
|
| 29 |
+
# no missing values -> can convert to float, otherwise raises
|
| 30 |
+
arr = con([0.1, 0.2, 0.3], dtype="Float64")
|
| 31 |
+
result = arr.to_numpy(dtype="float64")
|
| 32 |
+
expected = np.array([0.1, 0.2, 0.3], dtype="float64")
|
| 33 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 34 |
+
|
| 35 |
+
arr = con([0.1, 0.2, None], dtype="Float64")
|
| 36 |
+
result = arr.to_numpy(dtype="float64")
|
| 37 |
+
expected = np.array([0.1, 0.2, np.nan], dtype="float64")
|
| 38 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 39 |
+
|
| 40 |
+
result = arr.to_numpy(dtype="float64", na_value=np.nan)
|
| 41 |
+
expected = np.array([0.1, 0.2, np.nan], dtype="float64")
|
| 42 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
|
| 46 |
+
def test_to_numpy_int(box):
|
| 47 |
+
con = pd.Series if box else pd.array
|
| 48 |
+
|
| 49 |
+
# no missing values -> can convert to int, otherwise raises
|
| 50 |
+
arr = con([1.0, 2.0, 3.0], dtype="Float64")
|
| 51 |
+
result = arr.to_numpy(dtype="int64")
|
| 52 |
+
expected = np.array([1, 2, 3], dtype="int64")
|
| 53 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 54 |
+
|
| 55 |
+
arr = con([1.0, 2.0, None], dtype="Float64")
|
| 56 |
+
with pytest.raises(ValueError, match="cannot convert to 'int64'-dtype"):
|
| 57 |
+
result = arr.to_numpy(dtype="int64")
|
| 58 |
+
|
| 59 |
+
# automatic casting (floors the values)
|
| 60 |
+
arr = con([0.1, 0.9, 1.1], dtype="Float64")
|
| 61 |
+
result = arr.to_numpy(dtype="int64")
|
| 62 |
+
expected = np.array([0, 0, 1], dtype="int64")
|
| 63 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
|
| 67 |
+
def test_to_numpy_na_value(box):
|
| 68 |
+
con = pd.Series if box else pd.array
|
| 69 |
+
|
| 70 |
+
arr = con([0.0, 1.0, None], dtype="Float64")
|
| 71 |
+
result = arr.to_numpy(dtype=object, na_value=None)
|
| 72 |
+
expected = np.array([0.0, 1.0, None], dtype="object")
|
| 73 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 74 |
+
|
| 75 |
+
result = arr.to_numpy(dtype=bool, na_value=False)
|
| 76 |
+
expected = np.array([False, True, False], dtype="bool")
|
| 77 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 78 |
+
|
| 79 |
+
result = arr.to_numpy(dtype="int64", na_value=-99)
|
| 80 |
+
expected = np.array([0, 1, -99], dtype="int64")
|
| 81 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def test_to_numpy_na_value_with_nan():
|
| 85 |
+
# array with both NaN and NA -> only fill NA with `na_value`
|
| 86 |
+
arr = FloatingArray(np.array([0.0, np.nan, 0.0]), np.array([False, False, True]))
|
| 87 |
+
result = arr.to_numpy(dtype="float64", na_value=-1)
|
| 88 |
+
expected = np.array([0.0, np.nan, -1.0], dtype="float64")
|
| 89 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
@pytest.mark.parametrize("dtype", ["float64", "float32", "int32", "int64", "bool"])
|
| 93 |
+
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
|
| 94 |
+
def test_to_numpy_dtype(box, dtype):
|
| 95 |
+
con = pd.Series if box else pd.array
|
| 96 |
+
arr = con([0.0, 1.0], dtype="Float64")
|
| 97 |
+
|
| 98 |
+
result = arr.to_numpy(dtype=dtype)
|
| 99 |
+
expected = np.array([0, 1], dtype=dtype)
|
| 100 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
@pytest.mark.parametrize("dtype", ["int32", "int64", "bool"])
|
| 104 |
+
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
|
| 105 |
+
def test_to_numpy_na_raises(box, dtype):
|
| 106 |
+
con = pd.Series if box else pd.array
|
| 107 |
+
arr = con([0.0, 1.0, None], dtype="Float64")
|
| 108 |
+
with pytest.raises(ValueError, match=dtype):
|
| 109 |
+
arr.to_numpy(dtype=dtype)
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
|
| 113 |
+
def test_to_numpy_string(box, dtype):
|
| 114 |
+
con = pd.Series if box else pd.array
|
| 115 |
+
arr = con([0.0, 1.0, None], dtype="Float64")
|
| 116 |
+
|
| 117 |
+
result = arr.to_numpy(dtype="str")
|
| 118 |
+
expected = np.array([0.0, 1.0, pd.NA], dtype=f"{tm.ENDIAN}U32")
|
| 119 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def test_to_numpy_copy():
|
| 123 |
+
# to_numpy can be zero-copy if no missing values
|
| 124 |
+
arr = pd.array([0.1, 0.2, 0.3], dtype="Float64")
|
| 125 |
+
result = arr.to_numpy(dtype="float64")
|
| 126 |
+
result[0] = 10
|
| 127 |
+
tm.assert_extension_array_equal(arr, pd.array([10, 0.2, 0.3], dtype="Float64"))
|
| 128 |
+
|
| 129 |
+
arr = pd.array([0.1, 0.2, 0.3], dtype="Float64")
|
| 130 |
+
result = arr.to_numpy(dtype="float64", copy=True)
|
| 131 |
+
result[0] = 10
|
| 132 |
+
tm.assert_extension_array_equal(arr, pd.array([0.1, 0.2, 0.3], dtype="Float64"))
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/masked_shared.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests shared by MaskedArray subclasses.
|
| 3 |
+
"""
|
| 4 |
+
import numpy as np
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import pandas as pd
|
| 8 |
+
import pandas._testing as tm
|
| 9 |
+
from pandas.tests.extension.base import BaseOpsUtil
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ComparisonOps(BaseOpsUtil):
|
| 13 |
+
def _compare_other(self, data, op, other):
|
| 14 |
+
# array
|
| 15 |
+
result = pd.Series(op(data, other))
|
| 16 |
+
expected = pd.Series(op(data._data, other), dtype="boolean")
|
| 17 |
+
|
| 18 |
+
# fill the nan locations
|
| 19 |
+
expected[data._mask] = pd.NA
|
| 20 |
+
|
| 21 |
+
tm.assert_series_equal(result, expected)
|
| 22 |
+
|
| 23 |
+
# series
|
| 24 |
+
ser = pd.Series(data)
|
| 25 |
+
result = op(ser, other)
|
| 26 |
+
|
| 27 |
+
# Set nullable dtype here to avoid upcasting when setting to pd.NA below
|
| 28 |
+
expected = op(pd.Series(data._data), other).astype("boolean")
|
| 29 |
+
|
| 30 |
+
# fill the nan locations
|
| 31 |
+
expected[data._mask] = pd.NA
|
| 32 |
+
|
| 33 |
+
tm.assert_series_equal(result, expected)
|
| 34 |
+
|
| 35 |
+
# subclass will override to parametrize 'other'
|
| 36 |
+
def test_scalar(self, other, comparison_op, dtype):
|
| 37 |
+
op = comparison_op
|
| 38 |
+
left = pd.array([1, 0, None], dtype=dtype)
|
| 39 |
+
|
| 40 |
+
result = op(left, other)
|
| 41 |
+
|
| 42 |
+
if other is pd.NA:
|
| 43 |
+
expected = pd.array([None, None, None], dtype="boolean")
|
| 44 |
+
else:
|
| 45 |
+
values = op(left._data, other)
|
| 46 |
+
expected = pd.arrays.BooleanArray(values, left._mask, copy=True)
|
| 47 |
+
tm.assert_extension_array_equal(result, expected)
|
| 48 |
+
|
| 49 |
+
# ensure we haven't mutated anything inplace
|
| 50 |
+
result[0] = pd.NA
|
| 51 |
+
tm.assert_extension_array_equal(left, pd.array([1, 0, None], dtype=dtype))
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class NumericOps:
|
| 55 |
+
# Shared by IntegerArray and FloatingArray, not BooleanArray
|
| 56 |
+
|
| 57 |
+
def test_searchsorted_nan(self, dtype):
|
| 58 |
+
# The base class casts to object dtype, for which searchsorted returns
|
| 59 |
+
# 0 from the left and 10 from the right.
|
| 60 |
+
arr = pd.array(range(10), dtype=dtype)
|
| 61 |
+
|
| 62 |
+
assert arr.searchsorted(np.nan, side="left") == 10
|
| 63 |
+
assert arr.searchsorted(np.nan, side="right") == 10
|
| 64 |
+
|
| 65 |
+
def test_no_shared_mask(self, data):
|
| 66 |
+
result = data + 1
|
| 67 |
+
assert not tm.shares_memory(result, data)
|
| 68 |
+
|
| 69 |
+
def test_array(self, comparison_op, dtype):
|
| 70 |
+
op = comparison_op
|
| 71 |
+
|
| 72 |
+
left = pd.array([0, 1, 2, None, None, None], dtype=dtype)
|
| 73 |
+
right = pd.array([0, 1, None, 0, 1, None], dtype=dtype)
|
| 74 |
+
|
| 75 |
+
result = op(left, right)
|
| 76 |
+
values = op(left._data, right._data)
|
| 77 |
+
mask = left._mask | right._mask
|
| 78 |
+
|
| 79 |
+
expected = pd.arrays.BooleanArray(values, mask)
|
| 80 |
+
tm.assert_extension_array_equal(result, expected)
|
| 81 |
+
|
| 82 |
+
# ensure we haven't mutated anything inplace
|
| 83 |
+
result[0] = pd.NA
|
| 84 |
+
tm.assert_extension_array_equal(
|
| 85 |
+
left, pd.array([0, 1, 2, None, None, None], dtype=dtype)
|
| 86 |
+
)
|
| 87 |
+
tm.assert_extension_array_equal(
|
| 88 |
+
right, pd.array([0, 1, None, 0, 1, None], dtype=dtype)
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
def test_compare_with_booleanarray(self, comparison_op, dtype):
|
| 92 |
+
op = comparison_op
|
| 93 |
+
|
| 94 |
+
left = pd.array([True, False, None] * 3, dtype="boolean")
|
| 95 |
+
right = pd.array([0] * 3 + [1] * 3 + [None] * 3, dtype=dtype)
|
| 96 |
+
other = pd.array([False] * 3 + [True] * 3 + [None] * 3, dtype="boolean")
|
| 97 |
+
|
| 98 |
+
expected = op(left, other)
|
| 99 |
+
result = op(left, right)
|
| 100 |
+
tm.assert_extension_array_equal(result, expected)
|
| 101 |
+
|
| 102 |
+
# reversed op
|
| 103 |
+
expected = op(other, left)
|
| 104 |
+
result = op(right, left)
|
| 105 |
+
tm.assert_extension_array_equal(result, expected)
|
| 106 |
+
|
| 107 |
+
def test_compare_to_string(self, dtype):
|
| 108 |
+
# GH#28930
|
| 109 |
+
ser = pd.Series([1, None], dtype=dtype)
|
| 110 |
+
result = ser == "a"
|
| 111 |
+
expected = pd.Series([False, pd.NA], dtype="boolean")
|
| 112 |
+
|
| 113 |
+
tm.assert_series_equal(result, expected)
|
| 114 |
+
|
| 115 |
+
def test_ufunc_with_out(self, dtype):
|
| 116 |
+
arr = pd.array([1, 2, 3], dtype=dtype)
|
| 117 |
+
arr2 = pd.array([1, 2, pd.NA], dtype=dtype)
|
| 118 |
+
|
| 119 |
+
mask = arr == arr
|
| 120 |
+
mask2 = arr2 == arr2
|
| 121 |
+
|
| 122 |
+
result = np.zeros(3, dtype=bool)
|
| 123 |
+
result |= mask
|
| 124 |
+
# If MaskedArray.__array_ufunc__ handled "out" appropriately,
|
| 125 |
+
# `result` should still be an ndarray.
|
| 126 |
+
assert isinstance(result, np.ndarray)
|
| 127 |
+
assert result.all()
|
| 128 |
+
|
| 129 |
+
# result |= mask worked because mask could be cast losslessly to
|
| 130 |
+
# boolean ndarray. mask2 can't, so this raises
|
| 131 |
+
result = np.zeros(3, dtype=bool)
|
| 132 |
+
msg = "Specify an appropriate 'na_value' for this dtype"
|
| 133 |
+
with pytest.raises(ValueError, match=msg):
|
| 134 |
+
result |= mask2
|
| 135 |
+
|
| 136 |
+
# addition
|
| 137 |
+
res = np.add(arr, arr2)
|
| 138 |
+
expected = pd.array([2, 4, pd.NA], dtype=dtype)
|
| 139 |
+
tm.assert_extension_array_equal(res, expected)
|
| 140 |
+
|
| 141 |
+
# when passing out=arr, we will modify 'arr' inplace.
|
| 142 |
+
res = np.add(arr, arr2, out=arr)
|
| 143 |
+
assert res is arr
|
| 144 |
+
tm.assert_extension_array_equal(res, expected)
|
| 145 |
+
tm.assert_extension_array_equal(arr, expected)
|
| 146 |
+
|
| 147 |
+
def test_mul_td64_array(self, dtype):
|
| 148 |
+
# GH#45622
|
| 149 |
+
arr = pd.array([1, 2, pd.NA], dtype=dtype)
|
| 150 |
+
other = np.arange(3, dtype=np.int64).view("m8[ns]")
|
| 151 |
+
|
| 152 |
+
result = arr * other
|
| 153 |
+
expected = pd.array([pd.Timedelta(0), pd.Timedelta(2), pd.NaT])
|
| 154 |
+
tm.assert_extension_array_equal(result, expected)
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_array.py
ADDED
|
@@ -0,0 +1,478 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import datetime
|
| 2 |
+
import decimal
|
| 3 |
+
import re
|
| 4 |
+
|
| 5 |
+
import numpy as np
|
| 6 |
+
import pytest
|
| 7 |
+
import pytz
|
| 8 |
+
|
| 9 |
+
import pandas as pd
|
| 10 |
+
import pandas._testing as tm
|
| 11 |
+
from pandas.api.extensions import register_extension_dtype
|
| 12 |
+
from pandas.arrays import (
|
| 13 |
+
BooleanArray,
|
| 14 |
+
DatetimeArray,
|
| 15 |
+
FloatingArray,
|
| 16 |
+
IntegerArray,
|
| 17 |
+
IntervalArray,
|
| 18 |
+
SparseArray,
|
| 19 |
+
TimedeltaArray,
|
| 20 |
+
)
|
| 21 |
+
from pandas.core.arrays import (
|
| 22 |
+
NumpyExtensionArray,
|
| 23 |
+
period_array,
|
| 24 |
+
)
|
| 25 |
+
from pandas.tests.extension.decimal import (
|
| 26 |
+
DecimalArray,
|
| 27 |
+
DecimalDtype,
|
| 28 |
+
to_decimal,
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
@pytest.mark.parametrize("dtype_unit", ["M8[h]", "M8[m]", "m8[h]", "M8[m]"])
|
| 33 |
+
def test_dt64_array(dtype_unit):
|
| 34 |
+
# PR 53817
|
| 35 |
+
dtype_var = np.dtype(dtype_unit)
|
| 36 |
+
msg = (
|
| 37 |
+
r"datetime64 and timedelta64 dtype resolutions other than "
|
| 38 |
+
r"'s', 'ms', 'us', and 'ns' are deprecated. "
|
| 39 |
+
r"In future releases passing unsupported resolutions will "
|
| 40 |
+
r"raise an exception."
|
| 41 |
+
)
|
| 42 |
+
with tm.assert_produces_warning(FutureWarning, match=re.escape(msg)):
|
| 43 |
+
pd.array([], dtype=dtype_var)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
@pytest.mark.parametrize(
|
| 47 |
+
"data, dtype, expected",
|
| 48 |
+
[
|
| 49 |
+
# Basic NumPy defaults.
|
| 50 |
+
([], None, FloatingArray._from_sequence([], dtype="Float64")),
|
| 51 |
+
([1, 2], None, IntegerArray._from_sequence([1, 2], dtype="Int64")),
|
| 52 |
+
([1, 2], object, NumpyExtensionArray(np.array([1, 2], dtype=object))),
|
| 53 |
+
(
|
| 54 |
+
[1, 2],
|
| 55 |
+
np.dtype("float32"),
|
| 56 |
+
NumpyExtensionArray(np.array([1.0, 2.0], dtype=np.dtype("float32"))),
|
| 57 |
+
),
|
| 58 |
+
(
|
| 59 |
+
np.array([], dtype=object),
|
| 60 |
+
None,
|
| 61 |
+
NumpyExtensionArray(np.array([], dtype=object)),
|
| 62 |
+
),
|
| 63 |
+
(
|
| 64 |
+
np.array([1, 2], dtype="int64"),
|
| 65 |
+
None,
|
| 66 |
+
IntegerArray._from_sequence([1, 2], dtype="Int64"),
|
| 67 |
+
),
|
| 68 |
+
(
|
| 69 |
+
np.array([1.0, 2.0], dtype="float64"),
|
| 70 |
+
None,
|
| 71 |
+
FloatingArray._from_sequence([1.0, 2.0], dtype="Float64"),
|
| 72 |
+
),
|
| 73 |
+
# String alias passes through to NumPy
|
| 74 |
+
([1, 2], "float32", NumpyExtensionArray(np.array([1, 2], dtype="float32"))),
|
| 75 |
+
([1, 2], "int64", NumpyExtensionArray(np.array([1, 2], dtype=np.int64))),
|
| 76 |
+
# GH#44715 FloatingArray does not support float16, so fall
|
| 77 |
+
# back to NumpyExtensionArray
|
| 78 |
+
(
|
| 79 |
+
np.array([1, 2], dtype=np.float16),
|
| 80 |
+
None,
|
| 81 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.float16)),
|
| 82 |
+
),
|
| 83 |
+
# idempotency with e.g. pd.array(pd.array([1, 2], dtype="int64"))
|
| 84 |
+
(
|
| 85 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.int32)),
|
| 86 |
+
None,
|
| 87 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.int32)),
|
| 88 |
+
),
|
| 89 |
+
# Period alias
|
| 90 |
+
(
|
| 91 |
+
[pd.Period("2000", "D"), pd.Period("2001", "D")],
|
| 92 |
+
"Period[D]",
|
| 93 |
+
period_array(["2000", "2001"], freq="D"),
|
| 94 |
+
),
|
| 95 |
+
# Period dtype
|
| 96 |
+
(
|
| 97 |
+
[pd.Period("2000", "D")],
|
| 98 |
+
pd.PeriodDtype("D"),
|
| 99 |
+
period_array(["2000"], freq="D"),
|
| 100 |
+
),
|
| 101 |
+
# Datetime (naive)
|
| 102 |
+
(
|
| 103 |
+
[1, 2],
|
| 104 |
+
np.dtype("datetime64[ns]"),
|
| 105 |
+
DatetimeArray._from_sequence(
|
| 106 |
+
np.array([1, 2], dtype="M8[ns]"), dtype="M8[ns]"
|
| 107 |
+
),
|
| 108 |
+
),
|
| 109 |
+
(
|
| 110 |
+
[1, 2],
|
| 111 |
+
np.dtype("datetime64[s]"),
|
| 112 |
+
DatetimeArray._from_sequence(
|
| 113 |
+
np.array([1, 2], dtype="M8[s]"), dtype="M8[s]"
|
| 114 |
+
),
|
| 115 |
+
),
|
| 116 |
+
(
|
| 117 |
+
np.array([1, 2], dtype="datetime64[ns]"),
|
| 118 |
+
None,
|
| 119 |
+
DatetimeArray._from_sequence(
|
| 120 |
+
np.array([1, 2], dtype="M8[ns]"), dtype="M8[ns]"
|
| 121 |
+
),
|
| 122 |
+
),
|
| 123 |
+
(
|
| 124 |
+
pd.DatetimeIndex(["2000", "2001"]),
|
| 125 |
+
np.dtype("datetime64[ns]"),
|
| 126 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
| 127 |
+
),
|
| 128 |
+
(
|
| 129 |
+
pd.DatetimeIndex(["2000", "2001"]),
|
| 130 |
+
None,
|
| 131 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
| 132 |
+
),
|
| 133 |
+
(
|
| 134 |
+
["2000", "2001"],
|
| 135 |
+
np.dtype("datetime64[ns]"),
|
| 136 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
| 137 |
+
),
|
| 138 |
+
# Datetime (tz-aware)
|
| 139 |
+
(
|
| 140 |
+
["2000", "2001"],
|
| 141 |
+
pd.DatetimeTZDtype(tz="CET"),
|
| 142 |
+
DatetimeArray._from_sequence(
|
| 143 |
+
["2000", "2001"], dtype=pd.DatetimeTZDtype(tz="CET")
|
| 144 |
+
),
|
| 145 |
+
),
|
| 146 |
+
# Timedelta
|
| 147 |
+
(
|
| 148 |
+
["1h", "2h"],
|
| 149 |
+
np.dtype("timedelta64[ns]"),
|
| 150 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
| 151 |
+
),
|
| 152 |
+
(
|
| 153 |
+
pd.TimedeltaIndex(["1h", "2h"]),
|
| 154 |
+
np.dtype("timedelta64[ns]"),
|
| 155 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
| 156 |
+
),
|
| 157 |
+
(
|
| 158 |
+
np.array([1, 2], dtype="m8[s]"),
|
| 159 |
+
np.dtype("timedelta64[s]"),
|
| 160 |
+
TimedeltaArray._from_sequence(
|
| 161 |
+
np.array([1, 2], dtype="m8[s]"), dtype="m8[s]"
|
| 162 |
+
),
|
| 163 |
+
),
|
| 164 |
+
(
|
| 165 |
+
pd.TimedeltaIndex(["1h", "2h"]),
|
| 166 |
+
None,
|
| 167 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
| 168 |
+
),
|
| 169 |
+
(
|
| 170 |
+
# preserve non-nano, i.e. don't cast to NumpyExtensionArray
|
| 171 |
+
TimedeltaArray._simple_new(
|
| 172 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
| 173 |
+
),
|
| 174 |
+
None,
|
| 175 |
+
TimedeltaArray._simple_new(
|
| 176 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
| 177 |
+
),
|
| 178 |
+
),
|
| 179 |
+
(
|
| 180 |
+
# preserve non-nano, i.e. don't cast to NumpyExtensionArray
|
| 181 |
+
TimedeltaArray._simple_new(
|
| 182 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
| 183 |
+
),
|
| 184 |
+
np.dtype("m8[s]"),
|
| 185 |
+
TimedeltaArray._simple_new(
|
| 186 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
| 187 |
+
),
|
| 188 |
+
),
|
| 189 |
+
# Category
|
| 190 |
+
(["a", "b"], "category", pd.Categorical(["a", "b"])),
|
| 191 |
+
(
|
| 192 |
+
["a", "b"],
|
| 193 |
+
pd.CategoricalDtype(None, ordered=True),
|
| 194 |
+
pd.Categorical(["a", "b"], ordered=True),
|
| 195 |
+
),
|
| 196 |
+
# Interval
|
| 197 |
+
(
|
| 198 |
+
[pd.Interval(1, 2), pd.Interval(3, 4)],
|
| 199 |
+
"interval",
|
| 200 |
+
IntervalArray.from_tuples([(1, 2), (3, 4)]),
|
| 201 |
+
),
|
| 202 |
+
# Sparse
|
| 203 |
+
([0, 1], "Sparse[int64]", SparseArray([0, 1], dtype="int64")),
|
| 204 |
+
# IntegerNA
|
| 205 |
+
([1, None], "Int16", pd.array([1, None], dtype="Int16")),
|
| 206 |
+
(
|
| 207 |
+
pd.Series([1, 2]),
|
| 208 |
+
None,
|
| 209 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.int64)),
|
| 210 |
+
),
|
| 211 |
+
# String
|
| 212 |
+
(
|
| 213 |
+
["a", None],
|
| 214 |
+
"string",
|
| 215 |
+
pd.StringDtype()
|
| 216 |
+
.construct_array_type()
|
| 217 |
+
._from_sequence(["a", None], dtype=pd.StringDtype()),
|
| 218 |
+
),
|
| 219 |
+
(
|
| 220 |
+
["a", None],
|
| 221 |
+
pd.StringDtype(),
|
| 222 |
+
pd.StringDtype()
|
| 223 |
+
.construct_array_type()
|
| 224 |
+
._from_sequence(["a", None], dtype=pd.StringDtype()),
|
| 225 |
+
),
|
| 226 |
+
# Boolean
|
| 227 |
+
(
|
| 228 |
+
[True, None],
|
| 229 |
+
"boolean",
|
| 230 |
+
BooleanArray._from_sequence([True, None], dtype="boolean"),
|
| 231 |
+
),
|
| 232 |
+
(
|
| 233 |
+
[True, None],
|
| 234 |
+
pd.BooleanDtype(),
|
| 235 |
+
BooleanArray._from_sequence([True, None], dtype="boolean"),
|
| 236 |
+
),
|
| 237 |
+
# Index
|
| 238 |
+
(pd.Index([1, 2]), None, NumpyExtensionArray(np.array([1, 2], dtype=np.int64))),
|
| 239 |
+
# Series[EA] returns the EA
|
| 240 |
+
(
|
| 241 |
+
pd.Series(pd.Categorical(["a", "b"], categories=["a", "b", "c"])),
|
| 242 |
+
None,
|
| 243 |
+
pd.Categorical(["a", "b"], categories=["a", "b", "c"]),
|
| 244 |
+
),
|
| 245 |
+
# "3rd party" EAs work
|
| 246 |
+
([decimal.Decimal(0), decimal.Decimal(1)], "decimal", to_decimal([0, 1])),
|
| 247 |
+
# pass an ExtensionArray, but a different dtype
|
| 248 |
+
(
|
| 249 |
+
period_array(["2000", "2001"], freq="D"),
|
| 250 |
+
"category",
|
| 251 |
+
pd.Categorical([pd.Period("2000", "D"), pd.Period("2001", "D")]),
|
| 252 |
+
),
|
| 253 |
+
],
|
| 254 |
+
)
|
| 255 |
+
def test_array(data, dtype, expected):
|
| 256 |
+
result = pd.array(data, dtype=dtype)
|
| 257 |
+
tm.assert_equal(result, expected)
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def test_array_copy():
|
| 261 |
+
a = np.array([1, 2])
|
| 262 |
+
# default is to copy
|
| 263 |
+
b = pd.array(a, dtype=a.dtype)
|
| 264 |
+
assert not tm.shares_memory(a, b)
|
| 265 |
+
|
| 266 |
+
# copy=True
|
| 267 |
+
b = pd.array(a, dtype=a.dtype, copy=True)
|
| 268 |
+
assert not tm.shares_memory(a, b)
|
| 269 |
+
|
| 270 |
+
# copy=False
|
| 271 |
+
b = pd.array(a, dtype=a.dtype, copy=False)
|
| 272 |
+
assert tm.shares_memory(a, b)
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
cet = pytz.timezone("CET")
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
@pytest.mark.parametrize(
|
| 279 |
+
"data, expected",
|
| 280 |
+
[
|
| 281 |
+
# period
|
| 282 |
+
(
|
| 283 |
+
[pd.Period("2000", "D"), pd.Period("2001", "D")],
|
| 284 |
+
period_array(["2000", "2001"], freq="D"),
|
| 285 |
+
),
|
| 286 |
+
# interval
|
| 287 |
+
([pd.Interval(0, 1), pd.Interval(1, 2)], IntervalArray.from_breaks([0, 1, 2])),
|
| 288 |
+
# datetime
|
| 289 |
+
(
|
| 290 |
+
[pd.Timestamp("2000"), pd.Timestamp("2001")],
|
| 291 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
| 292 |
+
),
|
| 293 |
+
(
|
| 294 |
+
[datetime.datetime(2000, 1, 1), datetime.datetime(2001, 1, 1)],
|
| 295 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
| 296 |
+
),
|
| 297 |
+
(
|
| 298 |
+
np.array([1, 2], dtype="M8[ns]"),
|
| 299 |
+
DatetimeArray._from_sequence(np.array([1, 2], dtype="M8[ns]")),
|
| 300 |
+
),
|
| 301 |
+
(
|
| 302 |
+
np.array([1, 2], dtype="M8[us]"),
|
| 303 |
+
DatetimeArray._simple_new(
|
| 304 |
+
np.array([1, 2], dtype="M8[us]"), dtype=np.dtype("M8[us]")
|
| 305 |
+
),
|
| 306 |
+
),
|
| 307 |
+
# datetimetz
|
| 308 |
+
(
|
| 309 |
+
[pd.Timestamp("2000", tz="CET"), pd.Timestamp("2001", tz="CET")],
|
| 310 |
+
DatetimeArray._from_sequence(
|
| 311 |
+
["2000", "2001"], dtype=pd.DatetimeTZDtype(tz="CET", unit="ns")
|
| 312 |
+
),
|
| 313 |
+
),
|
| 314 |
+
(
|
| 315 |
+
[
|
| 316 |
+
datetime.datetime(2000, 1, 1, tzinfo=cet),
|
| 317 |
+
datetime.datetime(2001, 1, 1, tzinfo=cet),
|
| 318 |
+
],
|
| 319 |
+
DatetimeArray._from_sequence(
|
| 320 |
+
["2000", "2001"], dtype=pd.DatetimeTZDtype(tz=cet, unit="ns")
|
| 321 |
+
),
|
| 322 |
+
),
|
| 323 |
+
# timedelta
|
| 324 |
+
(
|
| 325 |
+
[pd.Timedelta("1h"), pd.Timedelta("2h")],
|
| 326 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
| 327 |
+
),
|
| 328 |
+
(
|
| 329 |
+
np.array([1, 2], dtype="m8[ns]"),
|
| 330 |
+
TimedeltaArray._from_sequence(np.array([1, 2], dtype="m8[ns]")),
|
| 331 |
+
),
|
| 332 |
+
(
|
| 333 |
+
np.array([1, 2], dtype="m8[us]"),
|
| 334 |
+
TimedeltaArray._from_sequence(np.array([1, 2], dtype="m8[us]")),
|
| 335 |
+
),
|
| 336 |
+
# integer
|
| 337 |
+
([1, 2], IntegerArray._from_sequence([1, 2], dtype="Int64")),
|
| 338 |
+
([1, None], IntegerArray._from_sequence([1, None], dtype="Int64")),
|
| 339 |
+
([1, pd.NA], IntegerArray._from_sequence([1, pd.NA], dtype="Int64")),
|
| 340 |
+
([1, np.nan], IntegerArray._from_sequence([1, np.nan], dtype="Int64")),
|
| 341 |
+
# float
|
| 342 |
+
([0.1, 0.2], FloatingArray._from_sequence([0.1, 0.2], dtype="Float64")),
|
| 343 |
+
([0.1, None], FloatingArray._from_sequence([0.1, pd.NA], dtype="Float64")),
|
| 344 |
+
([0.1, np.nan], FloatingArray._from_sequence([0.1, pd.NA], dtype="Float64")),
|
| 345 |
+
([0.1, pd.NA], FloatingArray._from_sequence([0.1, pd.NA], dtype="Float64")),
|
| 346 |
+
# integer-like float
|
| 347 |
+
([1.0, 2.0], FloatingArray._from_sequence([1.0, 2.0], dtype="Float64")),
|
| 348 |
+
([1.0, None], FloatingArray._from_sequence([1.0, pd.NA], dtype="Float64")),
|
| 349 |
+
([1.0, np.nan], FloatingArray._from_sequence([1.0, pd.NA], dtype="Float64")),
|
| 350 |
+
([1.0, pd.NA], FloatingArray._from_sequence([1.0, pd.NA], dtype="Float64")),
|
| 351 |
+
# mixed-integer-float
|
| 352 |
+
([1, 2.0], FloatingArray._from_sequence([1.0, 2.0], dtype="Float64")),
|
| 353 |
+
(
|
| 354 |
+
[1, np.nan, 2.0],
|
| 355 |
+
FloatingArray._from_sequence([1.0, None, 2.0], dtype="Float64"),
|
| 356 |
+
),
|
| 357 |
+
# string
|
| 358 |
+
(
|
| 359 |
+
["a", "b"],
|
| 360 |
+
pd.StringDtype()
|
| 361 |
+
.construct_array_type()
|
| 362 |
+
._from_sequence(["a", "b"], dtype=pd.StringDtype()),
|
| 363 |
+
),
|
| 364 |
+
(
|
| 365 |
+
["a", None],
|
| 366 |
+
pd.StringDtype()
|
| 367 |
+
.construct_array_type()
|
| 368 |
+
._from_sequence(["a", None], dtype=pd.StringDtype()),
|
| 369 |
+
),
|
| 370 |
+
# Boolean
|
| 371 |
+
([True, False], BooleanArray._from_sequence([True, False], dtype="boolean")),
|
| 372 |
+
([True, None], BooleanArray._from_sequence([True, None], dtype="boolean")),
|
| 373 |
+
],
|
| 374 |
+
)
|
| 375 |
+
def test_array_inference(data, expected):
|
| 376 |
+
result = pd.array(data)
|
| 377 |
+
tm.assert_equal(result, expected)
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
@pytest.mark.parametrize(
|
| 381 |
+
"data",
|
| 382 |
+
[
|
| 383 |
+
# mix of frequencies
|
| 384 |
+
[pd.Period("2000", "D"), pd.Period("2001", "Y")],
|
| 385 |
+
# mix of closed
|
| 386 |
+
[pd.Interval(0, 1, closed="left"), pd.Interval(1, 2, closed="right")],
|
| 387 |
+
# Mix of timezones
|
| 388 |
+
[pd.Timestamp("2000", tz="CET"), pd.Timestamp("2000", tz="UTC")],
|
| 389 |
+
# Mix of tz-aware and tz-naive
|
| 390 |
+
[pd.Timestamp("2000", tz="CET"), pd.Timestamp("2000")],
|
| 391 |
+
np.array([pd.Timestamp("2000"), pd.Timestamp("2000", tz="CET")]),
|
| 392 |
+
],
|
| 393 |
+
)
|
| 394 |
+
def test_array_inference_fails(data):
|
| 395 |
+
result = pd.array(data)
|
| 396 |
+
expected = NumpyExtensionArray(np.array(data, dtype=object))
|
| 397 |
+
tm.assert_extension_array_equal(result, expected)
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
@pytest.mark.parametrize("data", [np.array(0)])
|
| 401 |
+
def test_nd_raises(data):
|
| 402 |
+
with pytest.raises(ValueError, match="NumpyExtensionArray must be 1-dimensional"):
|
| 403 |
+
pd.array(data, dtype="int64")
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
def test_scalar_raises():
|
| 407 |
+
with pytest.raises(ValueError, match="Cannot pass scalar '1'"):
|
| 408 |
+
pd.array(1)
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
def test_dataframe_raises():
|
| 412 |
+
# GH#51167 don't accidentally cast to StringArray by doing inference on columns
|
| 413 |
+
df = pd.DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
|
| 414 |
+
msg = "Cannot pass DataFrame to 'pandas.array'"
|
| 415 |
+
with pytest.raises(TypeError, match=msg):
|
| 416 |
+
pd.array(df)
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
def test_bounds_check():
|
| 420 |
+
# GH21796
|
| 421 |
+
with pytest.raises(
|
| 422 |
+
TypeError, match=r"cannot safely cast non-equivalent int(32|64) to uint16"
|
| 423 |
+
):
|
| 424 |
+
pd.array([-1, 2, 3], dtype="UInt16")
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
# ---------------------------------------------------------------------------
|
| 428 |
+
# A couple dummy classes to ensure that Series and Indexes are unboxed before
|
| 429 |
+
# getting to the EA classes.
|
| 430 |
+
|
| 431 |
+
|
| 432 |
+
@register_extension_dtype
|
| 433 |
+
class DecimalDtype2(DecimalDtype):
|
| 434 |
+
name = "decimal2"
|
| 435 |
+
|
| 436 |
+
@classmethod
|
| 437 |
+
def construct_array_type(cls):
|
| 438 |
+
"""
|
| 439 |
+
Return the array type associated with this dtype.
|
| 440 |
+
|
| 441 |
+
Returns
|
| 442 |
+
-------
|
| 443 |
+
type
|
| 444 |
+
"""
|
| 445 |
+
return DecimalArray2
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
class DecimalArray2(DecimalArray):
|
| 449 |
+
@classmethod
|
| 450 |
+
def _from_sequence(cls, scalars, *, dtype=None, copy=False):
|
| 451 |
+
if isinstance(scalars, (pd.Series, pd.Index)):
|
| 452 |
+
raise TypeError("scalars should not be of type pd.Series or pd.Index")
|
| 453 |
+
|
| 454 |
+
return super()._from_sequence(scalars, dtype=dtype, copy=copy)
|
| 455 |
+
|
| 456 |
+
|
| 457 |
+
def test_array_unboxes(index_or_series):
|
| 458 |
+
box = index_or_series
|
| 459 |
+
|
| 460 |
+
data = box([decimal.Decimal("1"), decimal.Decimal("2")])
|
| 461 |
+
dtype = DecimalDtype2()
|
| 462 |
+
# make sure it works
|
| 463 |
+
with pytest.raises(
|
| 464 |
+
TypeError, match="scalars should not be of type pd.Series or pd.Index"
|
| 465 |
+
):
|
| 466 |
+
DecimalArray2._from_sequence(data, dtype=dtype)
|
| 467 |
+
|
| 468 |
+
result = pd.array(data, dtype="decimal2")
|
| 469 |
+
expected = DecimalArray2._from_sequence(data.values, dtype=dtype)
|
| 470 |
+
tm.assert_equal(result, expected)
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def test_array_to_numpy_na():
|
| 474 |
+
# GH#40638
|
| 475 |
+
arr = pd.array([pd.NA, 1], dtype="string[python]")
|
| 476 |
+
result = arr.to_numpy(na_value=True, dtype=bool)
|
| 477 |
+
expected = np.array([True, True])
|
| 478 |
+
tm.assert_numpy_array_equal(result, expected)
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_datetimelike.py
ADDED
|
@@ -0,0 +1,1344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import re
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from pandas._libs import (
|
| 10 |
+
NaT,
|
| 11 |
+
OutOfBoundsDatetime,
|
| 12 |
+
Timestamp,
|
| 13 |
+
)
|
| 14 |
+
from pandas._libs.tslibs.dtypes import freq_to_period_freqstr
|
| 15 |
+
from pandas.compat.numpy import np_version_gt2
|
| 16 |
+
|
| 17 |
+
import pandas as pd
|
| 18 |
+
from pandas import (
|
| 19 |
+
DatetimeIndex,
|
| 20 |
+
Period,
|
| 21 |
+
PeriodIndex,
|
| 22 |
+
TimedeltaIndex,
|
| 23 |
+
)
|
| 24 |
+
import pandas._testing as tm
|
| 25 |
+
from pandas.core.arrays import (
|
| 26 |
+
DatetimeArray,
|
| 27 |
+
NumpyExtensionArray,
|
| 28 |
+
PeriodArray,
|
| 29 |
+
TimedeltaArray,
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# TODO: more freq variants
|
| 34 |
+
@pytest.fixture(params=["D", "B", "W", "ME", "QE", "YE"])
|
| 35 |
+
def freqstr(request):
|
| 36 |
+
"""Fixture returning parametrized frequency in string format."""
|
| 37 |
+
return request.param
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@pytest.fixture
|
| 41 |
+
def period_index(freqstr):
|
| 42 |
+
"""
|
| 43 |
+
A fixture to provide PeriodIndex objects with different frequencies.
|
| 44 |
+
|
| 45 |
+
Most PeriodArray behavior is already tested in PeriodIndex tests,
|
| 46 |
+
so here we just test that the PeriodArray behavior matches
|
| 47 |
+
the PeriodIndex behavior.
|
| 48 |
+
"""
|
| 49 |
+
# TODO: non-monotone indexes; NaTs, different start dates
|
| 50 |
+
with warnings.catch_warnings():
|
| 51 |
+
# suppress deprecation of Period[B]
|
| 52 |
+
warnings.filterwarnings(
|
| 53 |
+
"ignore", message="Period with BDay freq", category=FutureWarning
|
| 54 |
+
)
|
| 55 |
+
freqstr = freq_to_period_freqstr(1, freqstr)
|
| 56 |
+
pi = pd.period_range(start=Timestamp("2000-01-01"), periods=100, freq=freqstr)
|
| 57 |
+
return pi
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
@pytest.fixture
|
| 61 |
+
def datetime_index(freqstr):
|
| 62 |
+
"""
|
| 63 |
+
A fixture to provide DatetimeIndex objects with different frequencies.
|
| 64 |
+
|
| 65 |
+
Most DatetimeArray behavior is already tested in DatetimeIndex tests,
|
| 66 |
+
so here we just test that the DatetimeArray behavior matches
|
| 67 |
+
the DatetimeIndex behavior.
|
| 68 |
+
"""
|
| 69 |
+
# TODO: non-monotone indexes; NaTs, different start dates, timezones
|
| 70 |
+
dti = pd.date_range(start=Timestamp("2000-01-01"), periods=100, freq=freqstr)
|
| 71 |
+
return dti
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
@pytest.fixture
|
| 75 |
+
def timedelta_index():
|
| 76 |
+
"""
|
| 77 |
+
A fixture to provide TimedeltaIndex objects with different frequencies.
|
| 78 |
+
Most TimedeltaArray behavior is already tested in TimedeltaIndex tests,
|
| 79 |
+
so here we just test that the TimedeltaArray behavior matches
|
| 80 |
+
the TimedeltaIndex behavior.
|
| 81 |
+
"""
|
| 82 |
+
# TODO: flesh this out
|
| 83 |
+
return TimedeltaIndex(["1 Day", "3 Hours", "NaT"])
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class SharedTests:
|
| 87 |
+
index_cls: type[DatetimeIndex | PeriodIndex | TimedeltaIndex]
|
| 88 |
+
|
| 89 |
+
@pytest.fixture
|
| 90 |
+
def arr1d(self):
|
| 91 |
+
"""Fixture returning DatetimeArray with daily frequency."""
|
| 92 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 93 |
+
if self.array_cls is PeriodArray:
|
| 94 |
+
arr = self.array_cls(data, freq="D")
|
| 95 |
+
else:
|
| 96 |
+
arr = self.index_cls(data, freq="D")._data
|
| 97 |
+
return arr
|
| 98 |
+
|
| 99 |
+
def test_compare_len1_raises(self, arr1d):
|
| 100 |
+
# make sure we raise when comparing with different lengths, specific
|
| 101 |
+
# to the case where one has length-1, which numpy would broadcast
|
| 102 |
+
arr = arr1d
|
| 103 |
+
idx = self.index_cls(arr)
|
| 104 |
+
|
| 105 |
+
with pytest.raises(ValueError, match="Lengths must match"):
|
| 106 |
+
arr == arr[:1]
|
| 107 |
+
|
| 108 |
+
# test the index classes while we're at it, GH#23078
|
| 109 |
+
with pytest.raises(ValueError, match="Lengths must match"):
|
| 110 |
+
idx <= idx[[0]]
|
| 111 |
+
|
| 112 |
+
@pytest.mark.parametrize(
|
| 113 |
+
"result",
|
| 114 |
+
[
|
| 115 |
+
pd.date_range("2020", periods=3),
|
| 116 |
+
pd.date_range("2020", periods=3, tz="UTC"),
|
| 117 |
+
pd.timedelta_range("0 days", periods=3),
|
| 118 |
+
pd.period_range("2020Q1", periods=3, freq="Q"),
|
| 119 |
+
],
|
| 120 |
+
)
|
| 121 |
+
def test_compare_with_Categorical(self, result):
|
| 122 |
+
expected = pd.Categorical(result)
|
| 123 |
+
assert all(result == expected)
|
| 124 |
+
assert not any(result != expected)
|
| 125 |
+
|
| 126 |
+
@pytest.mark.parametrize("reverse", [True, False])
|
| 127 |
+
@pytest.mark.parametrize("as_index", [True, False])
|
| 128 |
+
def test_compare_categorical_dtype(self, arr1d, as_index, reverse, ordered):
|
| 129 |
+
other = pd.Categorical(arr1d, ordered=ordered)
|
| 130 |
+
if as_index:
|
| 131 |
+
other = pd.CategoricalIndex(other)
|
| 132 |
+
|
| 133 |
+
left, right = arr1d, other
|
| 134 |
+
if reverse:
|
| 135 |
+
left, right = right, left
|
| 136 |
+
|
| 137 |
+
ones = np.ones(arr1d.shape, dtype=bool)
|
| 138 |
+
zeros = ~ones
|
| 139 |
+
|
| 140 |
+
result = left == right
|
| 141 |
+
tm.assert_numpy_array_equal(result, ones)
|
| 142 |
+
|
| 143 |
+
result = left != right
|
| 144 |
+
tm.assert_numpy_array_equal(result, zeros)
|
| 145 |
+
|
| 146 |
+
if not reverse and not as_index:
|
| 147 |
+
# Otherwise Categorical raises TypeError bc it is not ordered
|
| 148 |
+
# TODO: we should probably get the same behavior regardless?
|
| 149 |
+
result = left < right
|
| 150 |
+
tm.assert_numpy_array_equal(result, zeros)
|
| 151 |
+
|
| 152 |
+
result = left <= right
|
| 153 |
+
tm.assert_numpy_array_equal(result, ones)
|
| 154 |
+
|
| 155 |
+
result = left > right
|
| 156 |
+
tm.assert_numpy_array_equal(result, zeros)
|
| 157 |
+
|
| 158 |
+
result = left >= right
|
| 159 |
+
tm.assert_numpy_array_equal(result, ones)
|
| 160 |
+
|
| 161 |
+
def test_take(self):
|
| 162 |
+
data = np.arange(100, dtype="i8") * 24 * 3600 * 10**9
|
| 163 |
+
np.random.default_rng(2).shuffle(data)
|
| 164 |
+
|
| 165 |
+
if self.array_cls is PeriodArray:
|
| 166 |
+
arr = PeriodArray(data, dtype="period[D]")
|
| 167 |
+
else:
|
| 168 |
+
arr = self.index_cls(data)._data
|
| 169 |
+
idx = self.index_cls._simple_new(arr)
|
| 170 |
+
|
| 171 |
+
takers = [1, 4, 94]
|
| 172 |
+
result = arr.take(takers)
|
| 173 |
+
expected = idx.take(takers)
|
| 174 |
+
|
| 175 |
+
tm.assert_index_equal(self.index_cls(result), expected)
|
| 176 |
+
|
| 177 |
+
takers = np.array([1, 4, 94])
|
| 178 |
+
result = arr.take(takers)
|
| 179 |
+
expected = idx.take(takers)
|
| 180 |
+
|
| 181 |
+
tm.assert_index_equal(self.index_cls(result), expected)
|
| 182 |
+
|
| 183 |
+
@pytest.mark.parametrize("fill_value", [2, 2.0, Timestamp(2021, 1, 1, 12).time])
|
| 184 |
+
def test_take_fill_raises(self, fill_value, arr1d):
|
| 185 |
+
msg = f"value should be a '{arr1d._scalar_type.__name__}' or 'NaT'. Got"
|
| 186 |
+
with pytest.raises(TypeError, match=msg):
|
| 187 |
+
arr1d.take([0, 1], allow_fill=True, fill_value=fill_value)
|
| 188 |
+
|
| 189 |
+
def test_take_fill(self, arr1d):
|
| 190 |
+
arr = arr1d
|
| 191 |
+
|
| 192 |
+
result = arr.take([-1, 1], allow_fill=True, fill_value=None)
|
| 193 |
+
assert result[0] is NaT
|
| 194 |
+
|
| 195 |
+
result = arr.take([-1, 1], allow_fill=True, fill_value=np.nan)
|
| 196 |
+
assert result[0] is NaT
|
| 197 |
+
|
| 198 |
+
result = arr.take([-1, 1], allow_fill=True, fill_value=NaT)
|
| 199 |
+
assert result[0] is NaT
|
| 200 |
+
|
| 201 |
+
@pytest.mark.filterwarnings(
|
| 202 |
+
"ignore:Period with BDay freq is deprecated:FutureWarning"
|
| 203 |
+
)
|
| 204 |
+
def test_take_fill_str(self, arr1d):
|
| 205 |
+
# Cast str fill_value matching other fill_value-taking methods
|
| 206 |
+
result = arr1d.take([-1, 1], allow_fill=True, fill_value=str(arr1d[-1]))
|
| 207 |
+
expected = arr1d[[-1, 1]]
|
| 208 |
+
tm.assert_equal(result, expected)
|
| 209 |
+
|
| 210 |
+
msg = f"value should be a '{arr1d._scalar_type.__name__}' or 'NaT'. Got"
|
| 211 |
+
with pytest.raises(TypeError, match=msg):
|
| 212 |
+
arr1d.take([-1, 1], allow_fill=True, fill_value="foo")
|
| 213 |
+
|
| 214 |
+
def test_concat_same_type(self, arr1d):
|
| 215 |
+
arr = arr1d
|
| 216 |
+
idx = self.index_cls(arr)
|
| 217 |
+
idx = idx.insert(0, NaT)
|
| 218 |
+
arr = arr1d
|
| 219 |
+
|
| 220 |
+
result = arr._concat_same_type([arr[:-1], arr[1:], arr])
|
| 221 |
+
arr2 = arr.astype(object)
|
| 222 |
+
expected = self.index_cls(np.concatenate([arr2[:-1], arr2[1:], arr2]))
|
| 223 |
+
|
| 224 |
+
tm.assert_index_equal(self.index_cls(result), expected)
|
| 225 |
+
|
| 226 |
+
def test_unbox_scalar(self, arr1d):
|
| 227 |
+
result = arr1d._unbox_scalar(arr1d[0])
|
| 228 |
+
expected = arr1d._ndarray.dtype.type
|
| 229 |
+
assert isinstance(result, expected)
|
| 230 |
+
|
| 231 |
+
result = arr1d._unbox_scalar(NaT)
|
| 232 |
+
assert isinstance(result, expected)
|
| 233 |
+
|
| 234 |
+
msg = f"'value' should be a {self.scalar_type.__name__}."
|
| 235 |
+
with pytest.raises(ValueError, match=msg):
|
| 236 |
+
arr1d._unbox_scalar("foo")
|
| 237 |
+
|
| 238 |
+
def test_check_compatible_with(self, arr1d):
|
| 239 |
+
arr1d._check_compatible_with(arr1d[0])
|
| 240 |
+
arr1d._check_compatible_with(arr1d[:1])
|
| 241 |
+
arr1d._check_compatible_with(NaT)
|
| 242 |
+
|
| 243 |
+
def test_scalar_from_string(self, arr1d):
|
| 244 |
+
result = arr1d._scalar_from_string(str(arr1d[0]))
|
| 245 |
+
assert result == arr1d[0]
|
| 246 |
+
|
| 247 |
+
def test_reduce_invalid(self, arr1d):
|
| 248 |
+
msg = "does not support reduction 'not a method'"
|
| 249 |
+
with pytest.raises(TypeError, match=msg):
|
| 250 |
+
arr1d._reduce("not a method")
|
| 251 |
+
|
| 252 |
+
@pytest.mark.parametrize("method", ["pad", "backfill"])
|
| 253 |
+
def test_fillna_method_doesnt_change_orig(self, method):
|
| 254 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 255 |
+
if self.array_cls is PeriodArray:
|
| 256 |
+
arr = self.array_cls(data, dtype="period[D]")
|
| 257 |
+
else:
|
| 258 |
+
arr = self.array_cls._from_sequence(data)
|
| 259 |
+
arr[4] = NaT
|
| 260 |
+
|
| 261 |
+
fill_value = arr[3] if method == "pad" else arr[5]
|
| 262 |
+
|
| 263 |
+
result = arr._pad_or_backfill(method=method)
|
| 264 |
+
assert result[4] == fill_value
|
| 265 |
+
|
| 266 |
+
# check that the original was not changed
|
| 267 |
+
assert arr[4] is NaT
|
| 268 |
+
|
| 269 |
+
def test_searchsorted(self):
|
| 270 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 271 |
+
if self.array_cls is PeriodArray:
|
| 272 |
+
arr = self.array_cls(data, dtype="period[D]")
|
| 273 |
+
else:
|
| 274 |
+
arr = self.array_cls._from_sequence(data)
|
| 275 |
+
|
| 276 |
+
# scalar
|
| 277 |
+
result = arr.searchsorted(arr[1])
|
| 278 |
+
assert result == 1
|
| 279 |
+
|
| 280 |
+
result = arr.searchsorted(arr[2], side="right")
|
| 281 |
+
assert result == 3
|
| 282 |
+
|
| 283 |
+
# own-type
|
| 284 |
+
result = arr.searchsorted(arr[1:3])
|
| 285 |
+
expected = np.array([1, 2], dtype=np.intp)
|
| 286 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 287 |
+
|
| 288 |
+
result = arr.searchsorted(arr[1:3], side="right")
|
| 289 |
+
expected = np.array([2, 3], dtype=np.intp)
|
| 290 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 291 |
+
|
| 292 |
+
# GH#29884 match numpy convention on whether NaT goes
|
| 293 |
+
# at the end or the beginning
|
| 294 |
+
result = arr.searchsorted(NaT)
|
| 295 |
+
assert result == 10
|
| 296 |
+
|
| 297 |
+
@pytest.mark.parametrize("box", [None, "index", "series"])
|
| 298 |
+
def test_searchsorted_castable_strings(self, arr1d, box, string_storage):
|
| 299 |
+
arr = arr1d
|
| 300 |
+
if box is None:
|
| 301 |
+
pass
|
| 302 |
+
elif box == "index":
|
| 303 |
+
# Test the equivalent Index.searchsorted method while we're here
|
| 304 |
+
arr = self.index_cls(arr)
|
| 305 |
+
else:
|
| 306 |
+
# Test the equivalent Series.searchsorted method while we're here
|
| 307 |
+
arr = pd.Series(arr)
|
| 308 |
+
|
| 309 |
+
# scalar
|
| 310 |
+
result = arr.searchsorted(str(arr[1]))
|
| 311 |
+
assert result == 1
|
| 312 |
+
|
| 313 |
+
result = arr.searchsorted(str(arr[2]), side="right")
|
| 314 |
+
assert result == 3
|
| 315 |
+
|
| 316 |
+
result = arr.searchsorted([str(x) for x in arr[1:3]])
|
| 317 |
+
expected = np.array([1, 2], dtype=np.intp)
|
| 318 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 319 |
+
|
| 320 |
+
with pytest.raises(
|
| 321 |
+
TypeError,
|
| 322 |
+
match=re.escape(
|
| 323 |
+
f"value should be a '{arr1d._scalar_type.__name__}', 'NaT', "
|
| 324 |
+
"or array of those. Got 'str' instead."
|
| 325 |
+
),
|
| 326 |
+
):
|
| 327 |
+
arr.searchsorted("foo")
|
| 328 |
+
|
| 329 |
+
with pd.option_context("string_storage", string_storage):
|
| 330 |
+
with pytest.raises(
|
| 331 |
+
TypeError,
|
| 332 |
+
match=re.escape(
|
| 333 |
+
f"value should be a '{arr1d._scalar_type.__name__}', 'NaT', "
|
| 334 |
+
"or array of those. Got string array instead."
|
| 335 |
+
),
|
| 336 |
+
):
|
| 337 |
+
arr.searchsorted([str(arr[1]), "baz"])
|
| 338 |
+
|
| 339 |
+
def test_getitem_near_implementation_bounds(self):
|
| 340 |
+
# We only check tz-naive for DTA bc the bounds are slightly different
|
| 341 |
+
# for other tzs
|
| 342 |
+
i8vals = np.asarray([NaT._value + n for n in range(1, 5)], dtype="i8")
|
| 343 |
+
if self.array_cls is PeriodArray:
|
| 344 |
+
arr = self.array_cls(i8vals, dtype="period[ns]")
|
| 345 |
+
else:
|
| 346 |
+
arr = self.index_cls(i8vals, freq="ns")._data
|
| 347 |
+
arr[0] # should not raise OutOfBoundsDatetime
|
| 348 |
+
|
| 349 |
+
index = pd.Index(arr)
|
| 350 |
+
index[0] # should not raise OutOfBoundsDatetime
|
| 351 |
+
|
| 352 |
+
ser = pd.Series(arr)
|
| 353 |
+
ser[0] # should not raise OutOfBoundsDatetime
|
| 354 |
+
|
| 355 |
+
def test_getitem_2d(self, arr1d):
|
| 356 |
+
# 2d slicing on a 1D array
|
| 357 |
+
expected = type(arr1d)._simple_new(
|
| 358 |
+
arr1d._ndarray[:, np.newaxis], dtype=arr1d.dtype
|
| 359 |
+
)
|
| 360 |
+
result = arr1d[:, np.newaxis]
|
| 361 |
+
tm.assert_equal(result, expected)
|
| 362 |
+
|
| 363 |
+
# Lookup on a 2D array
|
| 364 |
+
arr2d = expected
|
| 365 |
+
expected = type(arr2d)._simple_new(arr2d._ndarray[:3, 0], dtype=arr2d.dtype)
|
| 366 |
+
result = arr2d[:3, 0]
|
| 367 |
+
tm.assert_equal(result, expected)
|
| 368 |
+
|
| 369 |
+
# Scalar lookup
|
| 370 |
+
result = arr2d[-1, 0]
|
| 371 |
+
expected = arr1d[-1]
|
| 372 |
+
assert result == expected
|
| 373 |
+
|
| 374 |
+
def test_iter_2d(self, arr1d):
|
| 375 |
+
data2d = arr1d._ndarray[:3, np.newaxis]
|
| 376 |
+
arr2d = type(arr1d)._simple_new(data2d, dtype=arr1d.dtype)
|
| 377 |
+
result = list(arr2d)
|
| 378 |
+
assert len(result) == 3
|
| 379 |
+
for x in result:
|
| 380 |
+
assert isinstance(x, type(arr1d))
|
| 381 |
+
assert x.ndim == 1
|
| 382 |
+
assert x.dtype == arr1d.dtype
|
| 383 |
+
|
| 384 |
+
def test_repr_2d(self, arr1d):
|
| 385 |
+
data2d = arr1d._ndarray[:3, np.newaxis]
|
| 386 |
+
arr2d = type(arr1d)._simple_new(data2d, dtype=arr1d.dtype)
|
| 387 |
+
|
| 388 |
+
result = repr(arr2d)
|
| 389 |
+
|
| 390 |
+
if isinstance(arr2d, TimedeltaArray):
|
| 391 |
+
expected = (
|
| 392 |
+
f"<{type(arr2d).__name__}>\n"
|
| 393 |
+
"[\n"
|
| 394 |
+
f"['{arr1d[0]._repr_base()}'],\n"
|
| 395 |
+
f"['{arr1d[1]._repr_base()}'],\n"
|
| 396 |
+
f"['{arr1d[2]._repr_base()}']\n"
|
| 397 |
+
"]\n"
|
| 398 |
+
f"Shape: (3, 1), dtype: {arr1d.dtype}"
|
| 399 |
+
)
|
| 400 |
+
else:
|
| 401 |
+
expected = (
|
| 402 |
+
f"<{type(arr2d).__name__}>\n"
|
| 403 |
+
"[\n"
|
| 404 |
+
f"['{arr1d[0]}'],\n"
|
| 405 |
+
f"['{arr1d[1]}'],\n"
|
| 406 |
+
f"['{arr1d[2]}']\n"
|
| 407 |
+
"]\n"
|
| 408 |
+
f"Shape: (3, 1), dtype: {arr1d.dtype}"
|
| 409 |
+
)
|
| 410 |
+
|
| 411 |
+
assert result == expected
|
| 412 |
+
|
| 413 |
+
def test_setitem(self):
|
| 414 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 415 |
+
if self.array_cls is PeriodArray:
|
| 416 |
+
arr = self.array_cls(data, dtype="period[D]")
|
| 417 |
+
else:
|
| 418 |
+
arr = self.index_cls(data, freq="D")._data
|
| 419 |
+
|
| 420 |
+
arr[0] = arr[1]
|
| 421 |
+
expected = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 422 |
+
expected[0] = expected[1]
|
| 423 |
+
|
| 424 |
+
tm.assert_numpy_array_equal(arr.asi8, expected)
|
| 425 |
+
|
| 426 |
+
arr[:2] = arr[-2:]
|
| 427 |
+
expected[:2] = expected[-2:]
|
| 428 |
+
tm.assert_numpy_array_equal(arr.asi8, expected)
|
| 429 |
+
|
| 430 |
+
@pytest.mark.parametrize(
|
| 431 |
+
"box",
|
| 432 |
+
[
|
| 433 |
+
pd.Index,
|
| 434 |
+
pd.Series,
|
| 435 |
+
np.array,
|
| 436 |
+
list,
|
| 437 |
+
NumpyExtensionArray,
|
| 438 |
+
],
|
| 439 |
+
)
|
| 440 |
+
def test_setitem_object_dtype(self, box, arr1d):
|
| 441 |
+
expected = arr1d.copy()[::-1]
|
| 442 |
+
if expected.dtype.kind in ["m", "M"]:
|
| 443 |
+
expected = expected._with_freq(None)
|
| 444 |
+
|
| 445 |
+
vals = expected
|
| 446 |
+
if box is list:
|
| 447 |
+
vals = list(vals)
|
| 448 |
+
elif box is np.array:
|
| 449 |
+
# if we do np.array(x).astype(object) then dt64 and td64 cast to ints
|
| 450 |
+
vals = np.array(vals.astype(object))
|
| 451 |
+
elif box is NumpyExtensionArray:
|
| 452 |
+
vals = box(np.asarray(vals, dtype=object))
|
| 453 |
+
else:
|
| 454 |
+
vals = box(vals).astype(object)
|
| 455 |
+
|
| 456 |
+
arr1d[:] = vals
|
| 457 |
+
|
| 458 |
+
tm.assert_equal(arr1d, expected)
|
| 459 |
+
|
| 460 |
+
def test_setitem_strs(self, arr1d):
|
| 461 |
+
# Check that we parse strs in both scalar and listlike
|
| 462 |
+
|
| 463 |
+
# Setting list-like of strs
|
| 464 |
+
expected = arr1d.copy()
|
| 465 |
+
expected[[0, 1]] = arr1d[-2:]
|
| 466 |
+
|
| 467 |
+
result = arr1d.copy()
|
| 468 |
+
result[:2] = [str(x) for x in arr1d[-2:]]
|
| 469 |
+
tm.assert_equal(result, expected)
|
| 470 |
+
|
| 471 |
+
# Same thing but now for just a scalar str
|
| 472 |
+
expected = arr1d.copy()
|
| 473 |
+
expected[0] = arr1d[-1]
|
| 474 |
+
|
| 475 |
+
result = arr1d.copy()
|
| 476 |
+
result[0] = str(arr1d[-1])
|
| 477 |
+
tm.assert_equal(result, expected)
|
| 478 |
+
|
| 479 |
+
@pytest.mark.parametrize("as_index", [True, False])
|
| 480 |
+
def test_setitem_categorical(self, arr1d, as_index):
|
| 481 |
+
expected = arr1d.copy()[::-1]
|
| 482 |
+
if not isinstance(expected, PeriodArray):
|
| 483 |
+
expected = expected._with_freq(None)
|
| 484 |
+
|
| 485 |
+
cat = pd.Categorical(arr1d)
|
| 486 |
+
if as_index:
|
| 487 |
+
cat = pd.CategoricalIndex(cat)
|
| 488 |
+
|
| 489 |
+
arr1d[:] = cat[::-1]
|
| 490 |
+
|
| 491 |
+
tm.assert_equal(arr1d, expected)
|
| 492 |
+
|
| 493 |
+
def test_setitem_raises(self, arr1d):
|
| 494 |
+
arr = arr1d[:10]
|
| 495 |
+
val = arr[0]
|
| 496 |
+
|
| 497 |
+
with pytest.raises(IndexError, match="index 12 is out of bounds"):
|
| 498 |
+
arr[12] = val
|
| 499 |
+
|
| 500 |
+
with pytest.raises(TypeError, match="value should be a.* 'object'"):
|
| 501 |
+
arr[0] = object()
|
| 502 |
+
|
| 503 |
+
msg = "cannot set using a list-like indexer with a different length"
|
| 504 |
+
with pytest.raises(ValueError, match=msg):
|
| 505 |
+
# GH#36339
|
| 506 |
+
arr[[]] = [arr[1]]
|
| 507 |
+
|
| 508 |
+
msg = "cannot set using a slice indexer with a different length than"
|
| 509 |
+
with pytest.raises(ValueError, match=msg):
|
| 510 |
+
# GH#36339
|
| 511 |
+
arr[1:1] = arr[:3]
|
| 512 |
+
|
| 513 |
+
@pytest.mark.parametrize("box", [list, np.array, pd.Index, pd.Series])
|
| 514 |
+
def test_setitem_numeric_raises(self, arr1d, box):
|
| 515 |
+
# We dont case e.g. int64 to our own dtype for setitem
|
| 516 |
+
|
| 517 |
+
msg = (
|
| 518 |
+
f"value should be a '{arr1d._scalar_type.__name__}', "
|
| 519 |
+
"'NaT', or array of those. Got"
|
| 520 |
+
)
|
| 521 |
+
with pytest.raises(TypeError, match=msg):
|
| 522 |
+
arr1d[:2] = box([0, 1])
|
| 523 |
+
|
| 524 |
+
with pytest.raises(TypeError, match=msg):
|
| 525 |
+
arr1d[:2] = box([0.0, 1.0])
|
| 526 |
+
|
| 527 |
+
def test_inplace_arithmetic(self):
|
| 528 |
+
# GH#24115 check that iadd and isub are actually in-place
|
| 529 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 530 |
+
if self.array_cls is PeriodArray:
|
| 531 |
+
arr = self.array_cls(data, dtype="period[D]")
|
| 532 |
+
else:
|
| 533 |
+
arr = self.index_cls(data, freq="D")._data
|
| 534 |
+
|
| 535 |
+
expected = arr + pd.Timedelta(days=1)
|
| 536 |
+
arr += pd.Timedelta(days=1)
|
| 537 |
+
tm.assert_equal(arr, expected)
|
| 538 |
+
|
| 539 |
+
expected = arr - pd.Timedelta(days=1)
|
| 540 |
+
arr -= pd.Timedelta(days=1)
|
| 541 |
+
tm.assert_equal(arr, expected)
|
| 542 |
+
|
| 543 |
+
def test_shift_fill_int_deprecated(self, arr1d):
|
| 544 |
+
# GH#31971, enforced in 2.0
|
| 545 |
+
with pytest.raises(TypeError, match="value should be a"):
|
| 546 |
+
arr1d.shift(1, fill_value=1)
|
| 547 |
+
|
| 548 |
+
def test_median(self, arr1d):
|
| 549 |
+
arr = arr1d
|
| 550 |
+
if len(arr) % 2 == 0:
|
| 551 |
+
# make it easier to define `expected`
|
| 552 |
+
arr = arr[:-1]
|
| 553 |
+
|
| 554 |
+
expected = arr[len(arr) // 2]
|
| 555 |
+
|
| 556 |
+
result = arr.median()
|
| 557 |
+
assert type(result) is type(expected)
|
| 558 |
+
assert result == expected
|
| 559 |
+
|
| 560 |
+
arr[len(arr) // 2] = NaT
|
| 561 |
+
if not isinstance(expected, Period):
|
| 562 |
+
expected = arr[len(arr) // 2 - 1 : len(arr) // 2 + 2].mean()
|
| 563 |
+
|
| 564 |
+
assert arr.median(skipna=False) is NaT
|
| 565 |
+
|
| 566 |
+
result = arr.median()
|
| 567 |
+
assert type(result) is type(expected)
|
| 568 |
+
assert result == expected
|
| 569 |
+
|
| 570 |
+
assert arr[:0].median() is NaT
|
| 571 |
+
assert arr[:0].median(skipna=False) is NaT
|
| 572 |
+
|
| 573 |
+
# 2d Case
|
| 574 |
+
arr2 = arr.reshape(-1, 1)
|
| 575 |
+
|
| 576 |
+
result = arr2.median(axis=None)
|
| 577 |
+
assert type(result) is type(expected)
|
| 578 |
+
assert result == expected
|
| 579 |
+
|
| 580 |
+
assert arr2.median(axis=None, skipna=False) is NaT
|
| 581 |
+
|
| 582 |
+
result = arr2.median(axis=0)
|
| 583 |
+
expected2 = type(arr)._from_sequence([expected], dtype=arr.dtype)
|
| 584 |
+
tm.assert_equal(result, expected2)
|
| 585 |
+
|
| 586 |
+
result = arr2.median(axis=0, skipna=False)
|
| 587 |
+
expected2 = type(arr)._from_sequence([NaT], dtype=arr.dtype)
|
| 588 |
+
tm.assert_equal(result, expected2)
|
| 589 |
+
|
| 590 |
+
result = arr2.median(axis=1)
|
| 591 |
+
tm.assert_equal(result, arr)
|
| 592 |
+
|
| 593 |
+
result = arr2.median(axis=1, skipna=False)
|
| 594 |
+
tm.assert_equal(result, arr)
|
| 595 |
+
|
| 596 |
+
def test_from_integer_array(self):
|
| 597 |
+
arr = np.array([1, 2, 3], dtype=np.int64)
|
| 598 |
+
data = pd.array(arr, dtype="Int64")
|
| 599 |
+
if self.array_cls is PeriodArray:
|
| 600 |
+
expected = self.array_cls(arr, dtype=self.example_dtype)
|
| 601 |
+
result = self.array_cls(data, dtype=self.example_dtype)
|
| 602 |
+
else:
|
| 603 |
+
expected = self.array_cls._from_sequence(arr, dtype=self.example_dtype)
|
| 604 |
+
result = self.array_cls._from_sequence(data, dtype=self.example_dtype)
|
| 605 |
+
|
| 606 |
+
tm.assert_extension_array_equal(result, expected)
|
| 607 |
+
|
| 608 |
+
|
| 609 |
+
class TestDatetimeArray(SharedTests):
|
| 610 |
+
index_cls = DatetimeIndex
|
| 611 |
+
array_cls = DatetimeArray
|
| 612 |
+
scalar_type = Timestamp
|
| 613 |
+
example_dtype = "M8[ns]"
|
| 614 |
+
|
| 615 |
+
@pytest.fixture
|
| 616 |
+
def arr1d(self, tz_naive_fixture, freqstr):
|
| 617 |
+
"""
|
| 618 |
+
Fixture returning DatetimeArray with parametrized frequency and
|
| 619 |
+
timezones
|
| 620 |
+
"""
|
| 621 |
+
tz = tz_naive_fixture
|
| 622 |
+
dti = pd.date_range("2016-01-01 01:01:00", periods=5, freq=freqstr, tz=tz)
|
| 623 |
+
dta = dti._data
|
| 624 |
+
return dta
|
| 625 |
+
|
| 626 |
+
def test_round(self, arr1d):
|
| 627 |
+
# GH#24064
|
| 628 |
+
dti = self.index_cls(arr1d)
|
| 629 |
+
|
| 630 |
+
result = dti.round(freq="2min")
|
| 631 |
+
expected = dti - pd.Timedelta(minutes=1)
|
| 632 |
+
expected = expected._with_freq(None)
|
| 633 |
+
tm.assert_index_equal(result, expected)
|
| 634 |
+
|
| 635 |
+
dta = dti._data
|
| 636 |
+
result = dta.round(freq="2min")
|
| 637 |
+
expected = expected._data._with_freq(None)
|
| 638 |
+
tm.assert_datetime_array_equal(result, expected)
|
| 639 |
+
|
| 640 |
+
def test_array_interface(self, datetime_index):
|
| 641 |
+
arr = datetime_index._data
|
| 642 |
+
copy_false = None if np_version_gt2 else False
|
| 643 |
+
|
| 644 |
+
# default asarray gives the same underlying data (for tz naive)
|
| 645 |
+
result = np.asarray(arr)
|
| 646 |
+
expected = arr._ndarray
|
| 647 |
+
assert result is expected
|
| 648 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 649 |
+
result = np.array(arr, copy=copy_false)
|
| 650 |
+
assert result is expected
|
| 651 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 652 |
+
|
| 653 |
+
# specifying M8[ns] gives the same result as default
|
| 654 |
+
result = np.asarray(arr, dtype="datetime64[ns]")
|
| 655 |
+
expected = arr._ndarray
|
| 656 |
+
assert result is expected
|
| 657 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 658 |
+
result = np.array(arr, dtype="datetime64[ns]", copy=copy_false)
|
| 659 |
+
assert result is expected
|
| 660 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 661 |
+
result = np.array(arr, dtype="datetime64[ns]")
|
| 662 |
+
if not np_version_gt2:
|
| 663 |
+
# TODO: GH 57739
|
| 664 |
+
assert result is not expected
|
| 665 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 666 |
+
|
| 667 |
+
# to object dtype
|
| 668 |
+
result = np.asarray(arr, dtype=object)
|
| 669 |
+
expected = np.array(list(arr), dtype=object)
|
| 670 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 671 |
+
|
| 672 |
+
# to other dtype always copies
|
| 673 |
+
result = np.asarray(arr, dtype="int64")
|
| 674 |
+
assert result is not arr.asi8
|
| 675 |
+
assert not np.may_share_memory(arr, result)
|
| 676 |
+
expected = arr.asi8.copy()
|
| 677 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 678 |
+
|
| 679 |
+
# other dtypes handled by numpy
|
| 680 |
+
for dtype in ["float64", str]:
|
| 681 |
+
result = np.asarray(arr, dtype=dtype)
|
| 682 |
+
expected = np.asarray(arr).astype(dtype)
|
| 683 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 684 |
+
|
| 685 |
+
def test_array_object_dtype(self, arr1d):
|
| 686 |
+
# GH#23524
|
| 687 |
+
arr = arr1d
|
| 688 |
+
dti = self.index_cls(arr1d)
|
| 689 |
+
|
| 690 |
+
expected = np.array(list(dti))
|
| 691 |
+
|
| 692 |
+
result = np.array(arr, dtype=object)
|
| 693 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 694 |
+
|
| 695 |
+
# also test the DatetimeIndex method while we're at it
|
| 696 |
+
result = np.array(dti, dtype=object)
|
| 697 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 698 |
+
|
| 699 |
+
def test_array_tz(self, arr1d):
|
| 700 |
+
# GH#23524
|
| 701 |
+
arr = arr1d
|
| 702 |
+
dti = self.index_cls(arr1d)
|
| 703 |
+
copy_false = None if np_version_gt2 else False
|
| 704 |
+
|
| 705 |
+
expected = dti.asi8.view("M8[ns]")
|
| 706 |
+
result = np.array(arr, dtype="M8[ns]")
|
| 707 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 708 |
+
|
| 709 |
+
result = np.array(arr, dtype="datetime64[ns]")
|
| 710 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 711 |
+
|
| 712 |
+
# check that we are not making copies when setting copy=copy_false
|
| 713 |
+
result = np.array(arr, dtype="M8[ns]", copy=copy_false)
|
| 714 |
+
assert result.base is expected.base
|
| 715 |
+
assert result.base is not None
|
| 716 |
+
result = np.array(arr, dtype="datetime64[ns]", copy=copy_false)
|
| 717 |
+
assert result.base is expected.base
|
| 718 |
+
assert result.base is not None
|
| 719 |
+
|
| 720 |
+
def test_array_i8_dtype(self, arr1d):
|
| 721 |
+
arr = arr1d
|
| 722 |
+
dti = self.index_cls(arr1d)
|
| 723 |
+
copy_false = None if np_version_gt2 else False
|
| 724 |
+
|
| 725 |
+
expected = dti.asi8
|
| 726 |
+
result = np.array(arr, dtype="i8")
|
| 727 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 728 |
+
|
| 729 |
+
result = np.array(arr, dtype=np.int64)
|
| 730 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 731 |
+
|
| 732 |
+
# check that we are still making copies when setting copy=copy_false
|
| 733 |
+
result = np.array(arr, dtype="i8", copy=copy_false)
|
| 734 |
+
assert result.base is not expected.base
|
| 735 |
+
assert result.base is None
|
| 736 |
+
|
| 737 |
+
def test_from_array_keeps_base(self):
|
| 738 |
+
# Ensure that DatetimeArray._ndarray.base isn't lost.
|
| 739 |
+
arr = np.array(["2000-01-01", "2000-01-02"], dtype="M8[ns]")
|
| 740 |
+
dta = DatetimeArray._from_sequence(arr)
|
| 741 |
+
|
| 742 |
+
assert dta._ndarray is arr
|
| 743 |
+
dta = DatetimeArray._from_sequence(arr[:0])
|
| 744 |
+
assert dta._ndarray.base is arr
|
| 745 |
+
|
| 746 |
+
def test_from_dti(self, arr1d):
|
| 747 |
+
arr = arr1d
|
| 748 |
+
dti = self.index_cls(arr1d)
|
| 749 |
+
assert list(dti) == list(arr)
|
| 750 |
+
|
| 751 |
+
# Check that Index.__new__ knows what to do with DatetimeArray
|
| 752 |
+
dti2 = pd.Index(arr)
|
| 753 |
+
assert isinstance(dti2, DatetimeIndex)
|
| 754 |
+
assert list(dti2) == list(arr)
|
| 755 |
+
|
| 756 |
+
def test_astype_object(self, arr1d):
|
| 757 |
+
arr = arr1d
|
| 758 |
+
dti = self.index_cls(arr1d)
|
| 759 |
+
|
| 760 |
+
asobj = arr.astype("O")
|
| 761 |
+
assert isinstance(asobj, np.ndarray)
|
| 762 |
+
assert asobj.dtype == "O"
|
| 763 |
+
assert list(asobj) == list(dti)
|
| 764 |
+
|
| 765 |
+
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
|
| 766 |
+
def test_to_period(self, datetime_index, freqstr):
|
| 767 |
+
dti = datetime_index
|
| 768 |
+
arr = dti._data
|
| 769 |
+
|
| 770 |
+
freqstr = freq_to_period_freqstr(1, freqstr)
|
| 771 |
+
expected = dti.to_period(freq=freqstr)
|
| 772 |
+
result = arr.to_period(freq=freqstr)
|
| 773 |
+
assert isinstance(result, PeriodArray)
|
| 774 |
+
|
| 775 |
+
tm.assert_equal(result, expected._data)
|
| 776 |
+
|
| 777 |
+
def test_to_period_2d(self, arr1d):
|
| 778 |
+
arr2d = arr1d.reshape(1, -1)
|
| 779 |
+
|
| 780 |
+
warn = None if arr1d.tz is None else UserWarning
|
| 781 |
+
with tm.assert_produces_warning(warn):
|
| 782 |
+
result = arr2d.to_period("D")
|
| 783 |
+
expected = arr1d.to_period("D").reshape(1, -1)
|
| 784 |
+
tm.assert_period_array_equal(result, expected)
|
| 785 |
+
|
| 786 |
+
@pytest.mark.parametrize("propname", DatetimeArray._bool_ops)
|
| 787 |
+
def test_bool_properties(self, arr1d, propname):
|
| 788 |
+
# in this case _bool_ops is just `is_leap_year`
|
| 789 |
+
dti = self.index_cls(arr1d)
|
| 790 |
+
arr = arr1d
|
| 791 |
+
assert dti.freq == arr.freq
|
| 792 |
+
|
| 793 |
+
result = getattr(arr, propname)
|
| 794 |
+
expected = np.array(getattr(dti, propname), dtype=result.dtype)
|
| 795 |
+
|
| 796 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 797 |
+
|
| 798 |
+
@pytest.mark.parametrize("propname", DatetimeArray._field_ops)
|
| 799 |
+
def test_int_properties(self, arr1d, propname):
|
| 800 |
+
dti = self.index_cls(arr1d)
|
| 801 |
+
arr = arr1d
|
| 802 |
+
|
| 803 |
+
result = getattr(arr, propname)
|
| 804 |
+
expected = np.array(getattr(dti, propname), dtype=result.dtype)
|
| 805 |
+
|
| 806 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 807 |
+
|
| 808 |
+
def test_take_fill_valid(self, arr1d, fixed_now_ts):
|
| 809 |
+
arr = arr1d
|
| 810 |
+
dti = self.index_cls(arr1d)
|
| 811 |
+
|
| 812 |
+
now = fixed_now_ts.tz_localize(dti.tz)
|
| 813 |
+
result = arr.take([-1, 1], allow_fill=True, fill_value=now)
|
| 814 |
+
assert result[0] == now
|
| 815 |
+
|
| 816 |
+
msg = f"value should be a '{arr1d._scalar_type.__name__}' or 'NaT'. Got"
|
| 817 |
+
with pytest.raises(TypeError, match=msg):
|
| 818 |
+
# fill_value Timedelta invalid
|
| 819 |
+
arr.take([-1, 1], allow_fill=True, fill_value=now - now)
|
| 820 |
+
|
| 821 |
+
with pytest.raises(TypeError, match=msg):
|
| 822 |
+
# fill_value Period invalid
|
| 823 |
+
arr.take([-1, 1], allow_fill=True, fill_value=Period("2014Q1"))
|
| 824 |
+
|
| 825 |
+
tz = None if dti.tz is not None else "US/Eastern"
|
| 826 |
+
now = fixed_now_ts.tz_localize(tz)
|
| 827 |
+
msg = "Cannot compare tz-naive and tz-aware datetime-like objects"
|
| 828 |
+
with pytest.raises(TypeError, match=msg):
|
| 829 |
+
# Timestamp with mismatched tz-awareness
|
| 830 |
+
arr.take([-1, 1], allow_fill=True, fill_value=now)
|
| 831 |
+
|
| 832 |
+
value = NaT._value
|
| 833 |
+
msg = f"value should be a '{arr1d._scalar_type.__name__}' or 'NaT'. Got"
|
| 834 |
+
with pytest.raises(TypeError, match=msg):
|
| 835 |
+
# require NaT, not iNaT, as it could be confused with an integer
|
| 836 |
+
arr.take([-1, 1], allow_fill=True, fill_value=value)
|
| 837 |
+
|
| 838 |
+
value = np.timedelta64("NaT", "ns")
|
| 839 |
+
with pytest.raises(TypeError, match=msg):
|
| 840 |
+
# require appropriate-dtype if we have a NA value
|
| 841 |
+
arr.take([-1, 1], allow_fill=True, fill_value=value)
|
| 842 |
+
|
| 843 |
+
if arr.tz is not None:
|
| 844 |
+
# GH#37356
|
| 845 |
+
# Assuming here that arr1d fixture does not include Australia/Melbourne
|
| 846 |
+
value = fixed_now_ts.tz_localize("Australia/Melbourne")
|
| 847 |
+
result = arr.take([-1, 1], allow_fill=True, fill_value=value)
|
| 848 |
+
|
| 849 |
+
expected = arr.take(
|
| 850 |
+
[-1, 1],
|
| 851 |
+
allow_fill=True,
|
| 852 |
+
fill_value=value.tz_convert(arr.dtype.tz),
|
| 853 |
+
)
|
| 854 |
+
tm.assert_equal(result, expected)
|
| 855 |
+
|
| 856 |
+
def test_concat_same_type_invalid(self, arr1d):
|
| 857 |
+
# different timezones
|
| 858 |
+
arr = arr1d
|
| 859 |
+
|
| 860 |
+
if arr.tz is None:
|
| 861 |
+
other = arr.tz_localize("UTC")
|
| 862 |
+
else:
|
| 863 |
+
other = arr.tz_localize(None)
|
| 864 |
+
|
| 865 |
+
with pytest.raises(ValueError, match="to_concat must have the same"):
|
| 866 |
+
arr._concat_same_type([arr, other])
|
| 867 |
+
|
| 868 |
+
def test_concat_same_type_different_freq(self, unit):
|
| 869 |
+
# we *can* concatenate DTI with different freqs.
|
| 870 |
+
a = pd.date_range("2000", periods=2, freq="D", tz="US/Central", unit=unit)._data
|
| 871 |
+
b = pd.date_range("2000", periods=2, freq="h", tz="US/Central", unit=unit)._data
|
| 872 |
+
result = DatetimeArray._concat_same_type([a, b])
|
| 873 |
+
expected = (
|
| 874 |
+
pd.to_datetime(
|
| 875 |
+
[
|
| 876 |
+
"2000-01-01 00:00:00",
|
| 877 |
+
"2000-01-02 00:00:00",
|
| 878 |
+
"2000-01-01 00:00:00",
|
| 879 |
+
"2000-01-01 01:00:00",
|
| 880 |
+
]
|
| 881 |
+
)
|
| 882 |
+
.tz_localize("US/Central")
|
| 883 |
+
.as_unit(unit)
|
| 884 |
+
._data
|
| 885 |
+
)
|
| 886 |
+
|
| 887 |
+
tm.assert_datetime_array_equal(result, expected)
|
| 888 |
+
|
| 889 |
+
def test_strftime(self, arr1d):
|
| 890 |
+
arr = arr1d
|
| 891 |
+
|
| 892 |
+
result = arr.strftime("%Y %b")
|
| 893 |
+
expected = np.array([ts.strftime("%Y %b") for ts in arr], dtype=object)
|
| 894 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 895 |
+
|
| 896 |
+
def test_strftime_nat(self):
|
| 897 |
+
# GH 29578
|
| 898 |
+
arr = DatetimeIndex(["2019-01-01", NaT])._data
|
| 899 |
+
|
| 900 |
+
result = arr.strftime("%Y-%m-%d")
|
| 901 |
+
expected = np.array(["2019-01-01", np.nan], dtype=object)
|
| 902 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 903 |
+
|
| 904 |
+
|
| 905 |
+
class TestTimedeltaArray(SharedTests):
|
| 906 |
+
index_cls = TimedeltaIndex
|
| 907 |
+
array_cls = TimedeltaArray
|
| 908 |
+
scalar_type = pd.Timedelta
|
| 909 |
+
example_dtype = "m8[ns]"
|
| 910 |
+
|
| 911 |
+
def test_from_tdi(self):
|
| 912 |
+
tdi = TimedeltaIndex(["1 Day", "3 Hours"])
|
| 913 |
+
arr = tdi._data
|
| 914 |
+
assert list(arr) == list(tdi)
|
| 915 |
+
|
| 916 |
+
# Check that Index.__new__ knows what to do with TimedeltaArray
|
| 917 |
+
tdi2 = pd.Index(arr)
|
| 918 |
+
assert isinstance(tdi2, TimedeltaIndex)
|
| 919 |
+
assert list(tdi2) == list(arr)
|
| 920 |
+
|
| 921 |
+
def test_astype_object(self):
|
| 922 |
+
tdi = TimedeltaIndex(["1 Day", "3 Hours"])
|
| 923 |
+
arr = tdi._data
|
| 924 |
+
asobj = arr.astype("O")
|
| 925 |
+
assert isinstance(asobj, np.ndarray)
|
| 926 |
+
assert asobj.dtype == "O"
|
| 927 |
+
assert list(asobj) == list(tdi)
|
| 928 |
+
|
| 929 |
+
def test_to_pytimedelta(self, timedelta_index):
|
| 930 |
+
tdi = timedelta_index
|
| 931 |
+
arr = tdi._data
|
| 932 |
+
|
| 933 |
+
expected = tdi.to_pytimedelta()
|
| 934 |
+
result = arr.to_pytimedelta()
|
| 935 |
+
|
| 936 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 937 |
+
|
| 938 |
+
def test_total_seconds(self, timedelta_index):
|
| 939 |
+
tdi = timedelta_index
|
| 940 |
+
arr = tdi._data
|
| 941 |
+
|
| 942 |
+
expected = tdi.total_seconds()
|
| 943 |
+
result = arr.total_seconds()
|
| 944 |
+
|
| 945 |
+
tm.assert_numpy_array_equal(result, expected.values)
|
| 946 |
+
|
| 947 |
+
@pytest.mark.parametrize("propname", TimedeltaArray._field_ops)
|
| 948 |
+
def test_int_properties(self, timedelta_index, propname):
|
| 949 |
+
tdi = timedelta_index
|
| 950 |
+
arr = tdi._data
|
| 951 |
+
|
| 952 |
+
result = getattr(arr, propname)
|
| 953 |
+
expected = np.array(getattr(tdi, propname), dtype=result.dtype)
|
| 954 |
+
|
| 955 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 956 |
+
|
| 957 |
+
def test_array_interface(self, timedelta_index):
|
| 958 |
+
arr = timedelta_index._data
|
| 959 |
+
copy_false = None if np_version_gt2 else False
|
| 960 |
+
|
| 961 |
+
# default asarray gives the same underlying data
|
| 962 |
+
result = np.asarray(arr)
|
| 963 |
+
expected = arr._ndarray
|
| 964 |
+
assert result is expected
|
| 965 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 966 |
+
result = np.array(arr, copy=copy_false)
|
| 967 |
+
assert result is expected
|
| 968 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 969 |
+
|
| 970 |
+
# specifying m8[ns] gives the same result as default
|
| 971 |
+
result = np.asarray(arr, dtype="timedelta64[ns]")
|
| 972 |
+
expected = arr._ndarray
|
| 973 |
+
assert result is expected
|
| 974 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 975 |
+
result = np.array(arr, dtype="timedelta64[ns]", copy=copy_false)
|
| 976 |
+
assert result is expected
|
| 977 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 978 |
+
result = np.array(arr, dtype="timedelta64[ns]")
|
| 979 |
+
if not np_version_gt2:
|
| 980 |
+
# TODO: GH 57739
|
| 981 |
+
assert result is not expected
|
| 982 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 983 |
+
|
| 984 |
+
# to object dtype
|
| 985 |
+
result = np.asarray(arr, dtype=object)
|
| 986 |
+
expected = np.array(list(arr), dtype=object)
|
| 987 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 988 |
+
|
| 989 |
+
# to other dtype always copies
|
| 990 |
+
result = np.asarray(arr, dtype="int64")
|
| 991 |
+
assert result is not arr.asi8
|
| 992 |
+
assert not np.may_share_memory(arr, result)
|
| 993 |
+
expected = arr.asi8.copy()
|
| 994 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 995 |
+
|
| 996 |
+
# other dtypes handled by numpy
|
| 997 |
+
for dtype in ["float64", str]:
|
| 998 |
+
result = np.asarray(arr, dtype=dtype)
|
| 999 |
+
expected = np.asarray(arr).astype(dtype)
|
| 1000 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 1001 |
+
|
| 1002 |
+
def test_take_fill_valid(self, timedelta_index, fixed_now_ts):
    """take(allow_fill=True) accepts a Timedelta fill value, and raises
    TypeError for Timestamp, Period, and dt64-NaT fill values."""
    arr = timedelta_index._data

    one_day = pd.Timedelta(days=1)
    filled = arr.take([-1, 1], allow_fill=True, fill_value=one_day)
    assert filled[0] == one_day

    msg = f"value should be a '{arr._scalar_type.__name__}' or 'NaT'. Got"

    # fill_value Timestamp invalid
    with pytest.raises(TypeError, match=msg):
        arr.take([0, 1], allow_fill=True, fill_value=fixed_now_ts)

    # fill_value Period invalid
    with pytest.raises(TypeError, match=msg):
        arr.take([0, 1], allow_fill=True, fill_value=fixed_now_ts.to_period("D"))

    # require appropriate-dtype if we have a NA value
    with pytest.raises(TypeError, match=msg):
        arr.take(
            [-1, 1], allow_fill=True, fill_value=np.datetime64("NaT", "ns")
        )
@pytest.mark.filterwarnings(r"ignore:Period with BDay freq is deprecated:FutureWarning")
@pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
class TestPeriodArray(SharedTests):
    """SharedTests specialization exercising PeriodArray/PeriodIndex."""

    index_cls = PeriodIndex
    array_cls = PeriodArray
    scalar_type = Period
    example_dtype = PeriodIndex([], freq="W").dtype

    @pytest.fixture
    def arr1d(self, period_index):
        """
        Fixture returning PeriodArray from parametrized PeriodIndex objects
        """
        return period_index._data

    def test_from_pi(self, arr1d):
        pi = self.index_cls(arr1d)
        arr = arr1d
        assert list(arr) == list(pi)

        # Check that Index.__new__ knows what to do with PeriodArray
        pi2 = pd.Index(arr)
        assert isinstance(pi2, PeriodIndex)
        assert list(pi2) == list(arr)

    def test_astype_object(self, arr1d):
        # astype("O") returns an object ndarray of Period scalars
        pi = self.index_cls(arr1d)
        arr = arr1d
        asobj = arr.astype("O")
        assert isinstance(asobj, np.ndarray)
        assert asobj.dtype == "O"
        assert list(asobj) == list(pi)

    def test_take_fill_valid(self, arr1d):
        arr = arr1d

        value = NaT._value
        msg = f"value should be a '{arr1d._scalar_type.__name__}' or 'NaT'. Got"
        with pytest.raises(TypeError, match=msg):
            # require NaT, not iNaT, as it could be confused with an integer
            arr.take([-1, 1], allow_fill=True, fill_value=value)

        value = np.timedelta64("NaT", "ns")
        with pytest.raises(TypeError, match=msg):
            # require appropriate-dtype if we have a NA value
            arr.take([-1, 1], allow_fill=True, fill_value=value)

    @pytest.mark.parametrize("how", ["S", "E"])
    def test_to_timestamp(self, how, arr1d):
        # to_timestamp on the array matches the index path for both
        # start ("S") and end ("E") conventions.
        pi = self.index_cls(arr1d)
        arr = arr1d

        expected = DatetimeIndex(pi.to_timestamp(how=how))._data
        result = arr.to_timestamp(how=how)
        assert isinstance(result, DatetimeArray)

        tm.assert_equal(result, expected)

    def test_to_timestamp_roundtrip_bday(self):
        # Case where infer_freq inside would choose "D" instead of "B"
        dta = pd.date_range("2021-10-18", periods=3, freq="B")._data
        parr = dta.to_period()
        result = parr.to_timestamp()
        assert result.freq == "B"
        tm.assert_extension_array_equal(result, dta)

        dta2 = dta[::2]
        parr2 = dta2.to_period()
        result2 = parr2.to_timestamp()
        assert result2.freq == "2B"
        tm.assert_extension_array_equal(result2, dta2)

        parr3 = dta.to_period("2B")
        result3 = parr3.to_timestamp()
        assert result3.freq == "B"
        tm.assert_extension_array_equal(result3, dta)

    def test_to_timestamp_out_of_bounds(self):
        # GH#19643 previously overflowed silently
        pi = pd.period_range("1500", freq="Y", periods=3)
        msg = "Out of bounds nanosecond timestamp: 1500-01-01 00:00:00"
        with pytest.raises(OutOfBoundsDatetime, match=msg):
            pi.to_timestamp()

        with pytest.raises(OutOfBoundsDatetime, match=msg):
            pi._data.to_timestamp()

    @pytest.mark.parametrize("propname", PeriodArray._bool_ops)
    def test_bool_properties(self, arr1d, propname):
        # in this case _bool_ops is just `is_leap_year`
        pi = self.index_cls(arr1d)
        arr = arr1d

        result = getattr(arr, propname)
        expected = np.array(getattr(pi, propname))

        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize("propname", PeriodArray._field_ops)
    def test_int_properties(self, arr1d, propname):
        # integer field accessors (year, month, ...) match the index path
        pi = self.index_cls(arr1d)
        arr = arr1d

        result = getattr(arr, propname)
        expected = np.array(getattr(pi, propname))

        tm.assert_numpy_array_equal(result, expected)

    def test_array_interface(self, arr1d):
        arr = arr1d

        # default asarray gives objects
        result = np.asarray(arr)
        expected = np.array(list(arr), dtype=object)
        tm.assert_numpy_array_equal(result, expected)

        # to object dtype (same as default)
        result = np.asarray(arr, dtype=object)
        tm.assert_numpy_array_equal(result, expected)

        result = np.asarray(arr, dtype="int64")
        tm.assert_numpy_array_equal(result, arr.asi8)

        # to other dtypes
        msg = r"float\(\) argument must be a string or a( real)? number, not 'Period'"
        with pytest.raises(TypeError, match=msg):
            np.asarray(arr, dtype="float64")

        result = np.asarray(arr, dtype="S20")
        expected = np.asarray(arr).astype("S20")
        tm.assert_numpy_array_equal(result, expected)

    def test_strftime(self, arr1d):
        arr = arr1d

        result = arr.strftime("%Y")
        expected = np.array([per.strftime("%Y") for per in arr], dtype=object)
        tm.assert_numpy_array_equal(result, expected)

    def test_strftime_nat(self):
        # GH 29578
        arr = PeriodArray(PeriodIndex(["2019-01-01", NaT], dtype="period[D]"))

        result = arr.strftime("%Y-%m-%d")
        expected = np.array(["2019-01-01", np.nan], dtype=object)
        tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize(
    "arr,casting_nats",
    [
        (
            TimedeltaIndex(["1 Day", "3 Hours", "NaT"])._data,
            (NaT, np.timedelta64("NaT", "ns")),
        ),
        (
            pd.date_range("2000-01-01", periods=3, freq="D")._data,
            (NaT, np.datetime64("NaT", "ns")),
        ),
        (pd.period_range("2000-01-01", periods=3, freq="D")._data, (NaT,)),
    ],
    ids=lambda x: type(x).__name__,
)
def test_casting_nat_setitem_array(arr, casting_nats):
    # Every NaT-like accepted for this array type coerces to the array's
    # own NaT representation when assigned into position 0.
    expected = type(arr)._from_sequence([NaT, arr[1], arr[2]], dtype=arr.dtype)

    for na_value in casting_nats:
        modified = arr.copy()
        modified[0] = na_value
        tm.assert_equal(modified, expected)
@pytest.mark.parametrize(
    "arr,non_casting_nats",
    [
        (
            TimedeltaIndex(["1 Day", "3 Hours", "NaT"])._data,
            (np.datetime64("NaT", "ns"), NaT._value),
        ),
        (
            pd.date_range("2000-01-01", periods=3, freq="D")._data,
            (np.timedelta64("NaT", "ns"), NaT._value),
        ),
        (
            pd.period_range("2000-01-01", periods=3, freq="D")._data,
            (np.datetime64("NaT", "ns"), np.timedelta64("NaT", "ns"), NaT._value),
        ),
    ],
    ids=lambda x: type(x).__name__,
)
def test_invalid_nat_setitem_array(arr, non_casting_nats):
    # Wrong-dtype NaT-likes and the raw iNaT integer must be rejected
    # instead of being silently coerced.
    msg = (
        "value should be a '(Timestamp|Timedelta|Period)', 'NaT', or array of those. "
        "Got '(timedelta64|datetime64|int)' instead."
    )

    for invalid in non_casting_nats:
        with pytest.raises(TypeError, match=msg):
            arr[0] = invalid
@pytest.mark.parametrize(
    "arr",
    [
        pd.date_range("2000", periods=4).array,
        pd.timedelta_range("2000", periods=4).array,
    ],
)
def test_to_numpy_extra(arr):
    """to_numpy NaT handling: NaN by default, iNaT for int64, and the
    na_value override; the source array itself must not be mutated."""
    arr[0] = NaT
    original = arr.copy()

    result = arr.to_numpy()
    assert np.isnan(result[0])

    # int64 view of NaT is iNaT (np.iinfo(np.int64).min)
    result = arr.to_numpy(dtype="int64")
    assert result[0] == -9223372036854775808

    result = arr.to_numpy(dtype="int64", na_value=0)
    assert result[0] == 0

    result = arr.to_numpy(na_value=arr[1].to_numpy())
    assert result[0] == result[1]

    result = arr.to_numpy(na_value=arr[1].to_numpy(copy=False))
    assert result[0] == result[1]

    # conversions above must not have modified the input array
    tm.assert_equal(arr, original)
@pytest.mark.parametrize("as_index", [True, False])
@pytest.mark.parametrize(
    "values",
    [
        pd.to_datetime(["2020-01-01", "2020-02-01"]),
        pd.to_timedelta([1, 2], unit="D"),
        PeriodIndex(["2020-01-01", "2020-02-01"], freq="D"),
    ],
)
@pytest.mark.parametrize(
    "klass",
    [
        list,
        np.array,
        pd.array,
        pd.Series,
        pd.Index,
        pd.Categorical,
        pd.CategoricalIndex,
    ],
)
def test_searchsorted_datetimelike_with_listlike(values, klass, as_index):
    # https://github.com/pandas-dev/pandas/issues/32762
    haystack = values if as_index else values._data

    needles = klass(haystack)
    result = haystack.searchsorted(needles)

    expected = np.array([0, 1], dtype=result.dtype)
    tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize(
    "values",
    [
        pd.to_datetime(["2020-01-01", "2020-02-01"]),
        pd.to_timedelta([1, 2], unit="D"),
        PeriodIndex(["2020-01-01", "2020-02-01"], freq="D"),
    ],
)
@pytest.mark.parametrize(
    "arg", [[1, 2], ["a", "b"], [Timestamp("2020-01-01", tz="Europe/London")] * 2]
)
def test_searchsorted_datetimelike_with_listlike_invalid_dtype(values, arg):
    """searchsorted with an incompatible listlike raises TypeError.

    https://github.com/pandas-dev/pandas/issues/32762
    """
    # BUG FIX: the previous pattern "[Unexpected type|Cannot compare]" was a
    # regex *character class* -- it matched any single character from that
    # set, so pytest.raises(match=...) accepted nearly any message.  Use an
    # alternation group so the intended error messages are actually checked.
    msg = "(Unexpected type|Cannot compare)"
    with pytest.raises(TypeError, match=msg):
        values.searchsorted(arg)
@pytest.mark.parametrize("klass", [list, tuple, np.array, pd.Series])
def test_period_index_construction_from_strings(klass):
    # https://github.com/pandas-dev/pandas/issues/26109
    raw = ["2020Q1", "2020Q2"] * 2
    result = PeriodIndex(klass(raw), freq="Q")
    expected = PeriodIndex(list(map(Period, raw)))
    tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("dtype", ["M8[ns]", "m8[ns]"])
def test_from_pandas_array(dtype):
    """Constructing datetime/timedelta arrays and indexes from a
    NumpyExtensionArray matches constructing from the raw ndarray."""
    # GH#24615
    data = np.array([1, 2, 3], dtype=dtype)
    arr = NumpyExtensionArray(data)

    cls = {"M8[ns]": DatetimeArray, "m8[ns]": TimedeltaArray}[dtype]

    # direct __init__ is deprecated; both constructions must warn
    depr_msg = f"{cls.__name__}.__init__ is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=depr_msg):
        result = cls(arr)
        expected = cls(data)
    tm.assert_extension_array_equal(result, expected)

    result = cls._from_sequence(arr, dtype=dtype)
    expected = cls._from_sequence(data, dtype=dtype)
    tm.assert_extension_array_equal(result, expected)

    func = {"M8[ns]": pd.to_datetime, "m8[ns]": pd.to_timedelta}[dtype]
    result = func(arr).array
    expected = func(data).array
    tm.assert_equal(result, expected)

    # Let's check the Indexes while we're here
    idx_cls = {"M8[ns]": DatetimeIndex, "m8[ns]": TimedeltaIndex}[dtype]
    result = idx_cls(arr)
    expected = idx_cls(data)
    tm.assert_index_equal(result, expected)
vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_datetimes.py
ADDED
|
@@ -0,0 +1,840 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for DatetimeArray
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
from datetime import timedelta
|
| 7 |
+
import operator
|
| 8 |
+
|
| 9 |
+
try:
|
| 10 |
+
from zoneinfo import ZoneInfo
|
| 11 |
+
except ImportError:
|
| 12 |
+
# Cannot assign to a type
|
| 13 |
+
ZoneInfo = None # type: ignore[misc, assignment]
|
| 14 |
+
|
| 15 |
+
import numpy as np
|
| 16 |
+
import pytest
|
| 17 |
+
|
| 18 |
+
from pandas._libs.tslibs import tz_compare
|
| 19 |
+
|
| 20 |
+
from pandas.core.dtypes.dtypes import DatetimeTZDtype
|
| 21 |
+
|
| 22 |
+
import pandas as pd
|
| 23 |
+
import pandas._testing as tm
|
| 24 |
+
from pandas.core.arrays import (
|
| 25 |
+
DatetimeArray,
|
| 26 |
+
TimedeltaArray,
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class TestNonNano:
    """Tests for DatetimeArray backed by non-nanosecond (s/ms/us) data."""

    @pytest.fixture(params=["s", "ms", "us"])
    def unit(self, request):
        """Fixture returning parametrized time units"""
        return request.param

    @pytest.fixture
    def dtype(self, unit, tz_naive_fixture):
        # numpy dtype when tz-naive, DatetimeTZDtype when tz-aware
        tz = tz_naive_fixture
        if tz is None:
            return np.dtype(f"datetime64[{unit}]")
        else:
            return DatetimeTZDtype(unit=unit, tz=tz)

    @pytest.fixture
    def dta_dti(self, unit, dtype):
        # Pair of (non-nano DatetimeArray, equivalent nano DatetimeIndex)
        tz = getattr(dtype, "tz", None)

        dti = pd.date_range("2016-01-01", periods=55, freq="D", tz=tz)
        if tz is None:
            arr = np.asarray(dti).astype(f"M8[{unit}]")
        else:
            arr = np.asarray(dti.tz_convert("UTC").tz_localize(None)).astype(
                f"M8[{unit}]"
            )

        dta = DatetimeArray._simple_new(arr, dtype=dtype)
        return dta, dti

    @pytest.fixture
    def dta(self, dta_dti):
        dta, dti = dta_dti
        return dta

    def test_non_nano(self, unit, dtype):
        arr = np.arange(5, dtype=np.int64).view(f"M8[{unit}]")
        dta = DatetimeArray._simple_new(arr, dtype=dtype)

        assert dta.dtype == dtype
        assert dta[0].unit == unit
        assert tz_compare(dta.tz, dta[0].tz)
        assert (dta[0] == dta[:1]).all()

    @pytest.mark.parametrize(
        "field", DatetimeArray._field_ops + DatetimeArray._bool_ops
    )
    def test_fields(self, unit, field, dtype, dta_dti):
        dta, dti = dta_dti

        assert (dti == dta).all()

        res = getattr(dta, field)
        expected = getattr(dti._data, field)
        tm.assert_numpy_array_equal(res, expected)

    def test_normalize(self, unit):
        dti = pd.date_range("2016-01-01 06:00:00", periods=55, freq="D")
        arr = np.asarray(dti).astype(f"M8[{unit}]")

        dta = DatetimeArray._simple_new(arr, dtype=arr.dtype)

        assert not dta.is_normalized

        # TODO: simplify once we can just .astype to other unit
        exp = np.asarray(dti.normalize()).astype(f"M8[{unit}]")
        expected = DatetimeArray._simple_new(exp, dtype=exp.dtype)

        res = dta.normalize()
        tm.assert_extension_array_equal(res, expected)

    def test_simple_new_requires_match(self, unit):
        # _simple_new asserts that the ndarray unit matches the dtype unit
        arr = np.arange(5, dtype=np.int64).view(f"M8[{unit}]")
        dtype = DatetimeTZDtype(unit, "UTC")

        dta = DatetimeArray._simple_new(arr, dtype=dtype)
        assert dta.dtype == dtype

        wrong = DatetimeTZDtype("ns", "UTC")
        with pytest.raises(AssertionError, match=""):
            DatetimeArray._simple_new(arr, dtype=wrong)

    def test_std_non_nano(self, unit):
        dti = pd.date_range("2016-01-01", periods=55, freq="D")
        arr = np.asarray(dti).astype(f"M8[{unit}]")

        dta = DatetimeArray._simple_new(arr, dtype=arr.dtype)

        # we should match the nano-reso std, but floored to our reso.
        res = dta.std()
        assert res._creso == dta._creso
        assert res == dti.std().floor(unit)

    @pytest.mark.filterwarnings("ignore:Converting to PeriodArray.*:UserWarning")
    def test_to_period(self, dta_dti):
        dta, dti = dta_dti
        result = dta.to_period("D")
        expected = dti._data.to_period("D")

        tm.assert_extension_array_equal(result, expected)

    def test_iter(self, dta):
        # iteration yields Timestamps preserving the array's resolution
        res = next(iter(dta))
        expected = dta[0]

        assert type(res) is pd.Timestamp
        assert res._value == expected._value
        assert res._creso == expected._creso
        assert res == expected

    def test_astype_object(self, dta):
        result = dta.astype(object)
        assert all(x._creso == dta._creso for x in result)
        assert all(x == y for x, y in zip(result, dta))

    def test_to_pydatetime(self, dta_dti):
        dta, dti = dta_dti

        result = dta.to_pydatetime()
        expected = dti.to_pydatetime()
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize("meth", ["time", "timetz", "date"])
    def test_time_date(self, dta_dti, meth):
        dta, dti = dta_dti

        result = getattr(dta, meth)
        expected = getattr(dti, meth)
        tm.assert_numpy_array_equal(result, expected)

    def test_format_native_types(self, unit, dtype, dta_dti):
        # In this case we should get the same formatted values with our nano
        # version dti._data as we do with the non-nano dta
        dta, dti = dta_dti

        res = dta._format_native_types()
        exp = dti._data._format_native_types()
        tm.assert_numpy_array_equal(res, exp)

    def test_repr(self, dta_dti, unit):
        dta, dti = dta_dti

        assert repr(dta) == repr(dti._data).replace("[ns", f"[{unit}")

    # TODO: tests with td64
    def test_compare_mismatched_resolutions(self, comparison_op):
        # comparison that numpy gets wrong bc of silent overflows
        op = comparison_op

        iinfo = np.iinfo(np.int64)
        vals = np.array([iinfo.min, iinfo.min + 1, iinfo.max], dtype=np.int64)

        # Construct so that arr2[1] < arr[1] < arr[2] < arr2[2]
        arr = np.array(vals).view("M8[ns]")
        arr2 = arr.view("M8[s]")

        left = DatetimeArray._simple_new(arr, dtype=arr.dtype)
        right = DatetimeArray._simple_new(arr2, dtype=arr2.dtype)

        if comparison_op is operator.eq:
            expected = np.array([False, False, False])
        elif comparison_op is operator.ne:
            expected = np.array([True, True, True])
        elif comparison_op in [operator.lt, operator.le]:
            expected = np.array([False, False, True])
        else:
            expected = np.array([False, True, False])

        result = op(left, right)
        tm.assert_numpy_array_equal(result, expected)

        result = op(left[1], right)
        tm.assert_numpy_array_equal(result, expected)

        if op not in [operator.eq, operator.ne]:
            # check that numpy still gets this wrong; if it is fixed we may be
            # able to remove compare_mismatched_resolutions
            np_res = op(left._ndarray, right._ndarray)
            tm.assert_numpy_array_equal(np_res[1:], ~expected[1:])

    def test_add_mismatched_reso_doesnt_downcast(self):
        # https://github.com/pandas-dev/pandas/pull/48748#issuecomment-1260181008
        td = pd.Timedelta(microseconds=1)
        dti = pd.date_range("2016-01-01", periods=3) - td
        dta = dti._data.as_unit("us")

        res = dta + td.as_unit("us")
        # even though the result is an even number of days
        # (so we _could_ downcast to unit="s"), we do not.
        assert res.unit == "us"

    @pytest.mark.parametrize(
        "scalar",
        [
            timedelta(hours=2),
            pd.Timedelta(hours=2),
            np.timedelta64(2, "h"),
            np.timedelta64(2 * 3600 * 1000, "ms"),
            pd.offsets.Minute(120),
            pd.offsets.Hour(2),
        ],
    )
    def test_add_timedeltalike_scalar_mismatched_reso(self, dta_dti, scalar):
        # result takes the finest of the two resolutions
        dta, dti = dta_dti

        td = pd.Timedelta(scalar)
        exp_unit = tm.get_finest_unit(dta.unit, td.unit)

        expected = (dti + td)._data.as_unit(exp_unit)
        result = dta + scalar
        tm.assert_extension_array_equal(result, expected)

        result = scalar + dta
        tm.assert_extension_array_equal(result, expected)

        expected = (dti - td)._data.as_unit(exp_unit)
        result = dta - scalar
        tm.assert_extension_array_equal(result, expected)

    def test_sub_datetimelike_scalar_mismatch(self):
        dti = pd.date_range("2016-01-01", periods=3)
        dta = dti._data.as_unit("us")

        ts = dta[0].as_unit("s")

        result = dta - ts
        expected = (dti - dti[0])._data.as_unit("us")
        assert result.dtype == "m8[us]"
        tm.assert_extension_array_equal(result, expected)

    def test_sub_datetime64_reso_mismatch(self):
        dti = pd.date_range("2016-01-01", periods=3)
        left = dti._data.as_unit("s")
        right = left.as_unit("ms")

        result = left - right
        exp_values = np.array([0, 0, 0], dtype="m8[ms]")
        expected = TimedeltaArray._simple_new(
            exp_values,
            dtype=exp_values.dtype,
        )
        tm.assert_extension_array_equal(result, expected)
        result2 = right - left
        tm.assert_extension_array_equal(result2, expected)
class TestDatetimeArrayComparisons:
    # TODO: merge this into tests/arithmetic/test_datetime64 once it is
    # sufficiently robust

    def test_cmp_dt64_arraylike_tznaive(self, comparison_op):
        """Comparing a tz-naive DatetimeArray against itself in various
        listlike containers gives all-True for eq/ge/le, all-False for
        ne/gt/lt, in both operand orders."""
        # arbitrary tz-naive DatetimeIndex
        op = comparison_op

        dti = pd.date_range("2016-01-1", freq="MS", periods=9, tz=None)
        arr = dti._data
        assert arr.freq == dti.freq
        assert arr.tz == dti.tz

        right = dti

        expected = np.ones(len(arr), dtype=bool)
        if comparison_op.__name__ in ["ne", "gt", "lt"]:
            # for these the comparisons should be all-False
            expected = ~expected

        result = op(arr, arr)
        tm.assert_numpy_array_equal(result, expected)
        for other in [
            right,
            np.array(right),
            list(right),
            tuple(right),
            right.astype(object),
        ]:
            result = op(arr, other)
            tm.assert_numpy_array_equal(result, expected)

            result = op(other, arr)
            tm.assert_numpy_array_equal(result, expected)
| 311 |
+
class TestDatetimeArray:
|
| 312 |
+
def test_astype_ns_to_ms_near_bounds(self):
|
| 313 |
+
# GH#55979
|
| 314 |
+
ts = pd.Timestamp("1677-09-21 00:12:43.145225")
|
| 315 |
+
target = ts.as_unit("ms")
|
| 316 |
+
|
| 317 |
+
dta = DatetimeArray._from_sequence([ts], dtype="M8[ns]")
|
| 318 |
+
assert (dta.view("i8") == ts.as_unit("ns").value).all()
|
| 319 |
+
|
| 320 |
+
result = dta.astype("M8[ms]")
|
| 321 |
+
assert result[0] == target
|
| 322 |
+
|
| 323 |
+
expected = DatetimeArray._from_sequence([ts], dtype="M8[ms]")
|
| 324 |
+
assert (expected.view("i8") == target._value).all()
|
| 325 |
+
|
| 326 |
+
tm.assert_datetime_array_equal(result, expected)
|
| 327 |
+
|
| 328 |
+
def test_astype_non_nano_tznaive(self):
|
| 329 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
| 330 |
+
|
| 331 |
+
res = dti.astype("M8[s]")
|
| 332 |
+
assert res.dtype == "M8[s]"
|
| 333 |
+
|
| 334 |
+
dta = dti._data
|
| 335 |
+
res = dta.astype("M8[s]")
|
| 336 |
+
assert res.dtype == "M8[s]"
|
| 337 |
+
assert isinstance(res, pd.core.arrays.DatetimeArray) # used to be ndarray
|
| 338 |
+
|
| 339 |
+
def test_astype_non_nano_tzaware(self):
|
| 340 |
+
dti = pd.date_range("2016-01-01", periods=3, tz="UTC")
|
| 341 |
+
|
| 342 |
+
res = dti.astype("M8[s, US/Pacific]")
|
| 343 |
+
assert res.dtype == "M8[s, US/Pacific]"
|
| 344 |
+
|
| 345 |
+
dta = dti._data
|
| 346 |
+
res = dta.astype("M8[s, US/Pacific]")
|
| 347 |
+
assert res.dtype == "M8[s, US/Pacific]"
|
| 348 |
+
|
| 349 |
+
# from non-nano to non-nano, preserving reso
|
| 350 |
+
res2 = res.astype("M8[s, UTC]")
|
| 351 |
+
assert res2.dtype == "M8[s, UTC]"
|
| 352 |
+
assert not tm.shares_memory(res2, res)
|
| 353 |
+
|
| 354 |
+
res3 = res.astype("M8[s, UTC]", copy=False)
|
| 355 |
+
assert res2.dtype == "M8[s, UTC]"
|
| 356 |
+
assert tm.shares_memory(res3, res)
|
| 357 |
+
|
| 358 |
+
def test_astype_to_same(self):
|
| 359 |
+
arr = DatetimeArray._from_sequence(
|
| 360 |
+
["2000"], dtype=DatetimeTZDtype(tz="US/Central")
|
| 361 |
+
)
|
| 362 |
+
result = arr.astype(DatetimeTZDtype(tz="US/Central"), copy=False)
|
| 363 |
+
assert result is arr
|
| 364 |
+
|
| 365 |
+
@pytest.mark.parametrize("dtype", ["datetime64[ns]", "datetime64[ns, UTC]"])
|
| 366 |
+
@pytest.mark.parametrize(
|
| 367 |
+
"other", ["datetime64[ns]", "datetime64[ns, UTC]", "datetime64[ns, CET]"]
|
| 368 |
+
)
|
| 369 |
+
def test_astype_copies(self, dtype, other):
|
| 370 |
+
# https://github.com/pandas-dev/pandas/pull/32490
|
| 371 |
+
ser = pd.Series([1, 2], dtype=dtype)
|
| 372 |
+
orig = ser.copy()
|
| 373 |
+
|
| 374 |
+
err = False
|
| 375 |
+
if (dtype == "datetime64[ns]") ^ (other == "datetime64[ns]"):
|
| 376 |
+
# deprecated in favor of tz_localize
|
| 377 |
+
err = True
|
| 378 |
+
|
| 379 |
+
if err:
|
| 380 |
+
if dtype == "datetime64[ns]":
|
| 381 |
+
msg = "Use obj.tz_localize instead or series.dt.tz_localize instead"
|
| 382 |
+
else:
|
| 383 |
+
msg = "from timezone-aware dtype to timezone-naive dtype"
|
| 384 |
+
with pytest.raises(TypeError, match=msg):
|
| 385 |
+
ser.astype(other)
|
| 386 |
+
else:
|
| 387 |
+
t = ser.astype(other)
|
| 388 |
+
t[:] = pd.NaT
|
| 389 |
+
tm.assert_series_equal(ser, orig)
|
| 390 |
+
|
| 391 |
+
@pytest.mark.parametrize("dtype", [int, np.int32, np.int64, "uint32", "uint64"])
|
| 392 |
+
def test_astype_int(self, dtype):
|
| 393 |
+
arr = DatetimeArray._from_sequence(
|
| 394 |
+
[pd.Timestamp("2000"), pd.Timestamp("2001")], dtype="M8[ns]"
|
| 395 |
+
)
|
| 396 |
+
|
| 397 |
+
if np.dtype(dtype) != np.int64:
|
| 398 |
+
with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"):
|
| 399 |
+
arr.astype(dtype)
|
| 400 |
+
return
|
| 401 |
+
|
| 402 |
+
result = arr.astype(dtype)
|
| 403 |
+
expected = arr._ndarray.view("i8")
|
| 404 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 405 |
+
|
| 406 |
+
def test_astype_to_sparse_dt64(self):
|
| 407 |
+
# GH#50082
|
| 408 |
+
dti = pd.date_range("2016-01-01", periods=4)
|
| 409 |
+
dta = dti._data
|
| 410 |
+
result = dta.astype("Sparse[datetime64[ns]]")
|
| 411 |
+
|
| 412 |
+
assert result.dtype == "Sparse[datetime64[ns]]"
|
| 413 |
+
assert (result == dta).all()
|
| 414 |
+
|
| 415 |
+
def test_tz_setter_raises(self):
|
| 416 |
+
arr = DatetimeArray._from_sequence(
|
| 417 |
+
["2000"], dtype=DatetimeTZDtype(tz="US/Central")
|
| 418 |
+
)
|
| 419 |
+
with pytest.raises(AttributeError, match="tz_localize"):
|
| 420 |
+
arr.tz = "UTC"
|
| 421 |
+
|
| 422 |
+
def test_setitem_str_impute_tz(self, tz_naive_fixture):
|
| 423 |
+
# Like for getitem, if we are passed a naive-like string, we impute
|
| 424 |
+
# our own timezone.
|
| 425 |
+
tz = tz_naive_fixture
|
| 426 |
+
|
| 427 |
+
data = np.array([1, 2, 3], dtype="M8[ns]")
|
| 428 |
+
dtype = data.dtype if tz is None else DatetimeTZDtype(tz=tz)
|
| 429 |
+
arr = DatetimeArray._from_sequence(data, dtype=dtype)
|
| 430 |
+
expected = arr.copy()
|
| 431 |
+
|
| 432 |
+
ts = pd.Timestamp("2020-09-08 16:50").tz_localize(tz)
|
| 433 |
+
setter = str(ts.tz_localize(None))
|
| 434 |
+
|
| 435 |
+
# Setting a scalar tznaive string
|
| 436 |
+
expected[0] = ts
|
| 437 |
+
arr[0] = setter
|
| 438 |
+
tm.assert_equal(arr, expected)
|
| 439 |
+
|
| 440 |
+
# Setting a listlike of tznaive strings
|
| 441 |
+
expected[1] = ts
|
| 442 |
+
arr[:2] = [setter, setter]
|
| 443 |
+
tm.assert_equal(arr, expected)
|
| 444 |
+
|
| 445 |
+
def test_setitem_different_tz_raises(self):
|
| 446 |
+
# pre-2.0 we required exact tz match, in 2.0 we require only
|
| 447 |
+
# tzawareness-match
|
| 448 |
+
data = np.array([1, 2, 3], dtype="M8[ns]")
|
| 449 |
+
arr = DatetimeArray._from_sequence(
|
| 450 |
+
data, copy=False, dtype=DatetimeTZDtype(tz="US/Central")
|
| 451 |
+
)
|
| 452 |
+
with pytest.raises(TypeError, match="Cannot compare tz-naive and tz-aware"):
|
| 453 |
+
arr[0] = pd.Timestamp("2000")
|
| 454 |
+
|
| 455 |
+
ts = pd.Timestamp("2000", tz="US/Eastern")
|
| 456 |
+
arr[0] = ts
|
| 457 |
+
assert arr[0] == ts.tz_convert("US/Central")
|
| 458 |
+
|
| 459 |
+
def test_setitem_clears_freq(self):
|
| 460 |
+
a = pd.date_range("2000", periods=2, freq="D", tz="US/Central")._data
|
| 461 |
+
a[0] = pd.Timestamp("2000", tz="US/Central")
|
| 462 |
+
assert a.freq is None
|
| 463 |
+
|
| 464 |
+
@pytest.mark.parametrize(
|
| 465 |
+
"obj",
|
| 466 |
+
[
|
| 467 |
+
pd.Timestamp("2021-01-01"),
|
| 468 |
+
pd.Timestamp("2021-01-01").to_datetime64(),
|
| 469 |
+
pd.Timestamp("2021-01-01").to_pydatetime(),
|
| 470 |
+
],
|
| 471 |
+
)
|
| 472 |
+
def test_setitem_objects(self, obj):
|
| 473 |
+
# make sure we accept datetime64 and datetime in addition to Timestamp
|
| 474 |
+
dti = pd.date_range("2000", periods=2, freq="D")
|
| 475 |
+
arr = dti._data
|
| 476 |
+
|
| 477 |
+
arr[0] = obj
|
| 478 |
+
assert arr[0] == obj
|
| 479 |
+
|
| 480 |
+
def test_repeat_preserves_tz(self):
|
| 481 |
+
dti = pd.date_range("2000", periods=2, freq="D", tz="US/Central")
|
| 482 |
+
arr = dti._data
|
| 483 |
+
|
| 484 |
+
repeated = arr.repeat([1, 1])
|
| 485 |
+
|
| 486 |
+
# preserves tz and values, but not freq
|
| 487 |
+
expected = DatetimeArray._from_sequence(arr.asi8, dtype=arr.dtype)
|
| 488 |
+
tm.assert_equal(repeated, expected)
|
| 489 |
+
|
| 490 |
+
def test_value_counts_preserves_tz(self):
|
| 491 |
+
dti = pd.date_range("2000", periods=2, freq="D", tz="US/Central")
|
| 492 |
+
arr = dti._data.repeat([4, 3])
|
| 493 |
+
|
| 494 |
+
result = arr.value_counts()
|
| 495 |
+
|
| 496 |
+
# Note: not tm.assert_index_equal, since `freq`s do not match
|
| 497 |
+
assert result.index.equals(dti)
|
| 498 |
+
|
| 499 |
+
arr[-2] = pd.NaT
|
| 500 |
+
result = arr.value_counts(dropna=False)
|
| 501 |
+
expected = pd.Series([4, 2, 1], index=[dti[0], dti[1], pd.NaT], name="count")
|
| 502 |
+
tm.assert_series_equal(result, expected)
|
| 503 |
+
|
| 504 |
+
@pytest.mark.parametrize("method", ["pad", "backfill"])
|
| 505 |
+
def test_fillna_preserves_tz(self, method):
|
| 506 |
+
dti = pd.date_range("2000-01-01", periods=5, freq="D", tz="US/Central")
|
| 507 |
+
arr = DatetimeArray._from_sequence(dti, copy=True)
|
| 508 |
+
arr[2] = pd.NaT
|
| 509 |
+
|
| 510 |
+
fill_val = dti[1] if method == "pad" else dti[3]
|
| 511 |
+
expected = DatetimeArray._from_sequence(
|
| 512 |
+
[dti[0], dti[1], fill_val, dti[3], dti[4]],
|
| 513 |
+
dtype=DatetimeTZDtype(tz="US/Central"),
|
| 514 |
+
)
|
| 515 |
+
|
| 516 |
+
result = arr._pad_or_backfill(method=method)
|
| 517 |
+
tm.assert_extension_array_equal(result, expected)
|
| 518 |
+
|
| 519 |
+
# assert that arr and dti were not modified in-place
|
| 520 |
+
assert arr[2] is pd.NaT
|
| 521 |
+
assert dti[2] == pd.Timestamp("2000-01-03", tz="US/Central")
|
| 522 |
+
|
| 523 |
+
def test_fillna_2d(self):
|
| 524 |
+
dti = pd.date_range("2016-01-01", periods=6, tz="US/Pacific")
|
| 525 |
+
dta = dti._data.reshape(3, 2).copy()
|
| 526 |
+
dta[0, 1] = pd.NaT
|
| 527 |
+
dta[1, 0] = pd.NaT
|
| 528 |
+
|
| 529 |
+
res1 = dta._pad_or_backfill(method="pad")
|
| 530 |
+
expected1 = dta.copy()
|
| 531 |
+
expected1[1, 0] = dta[0, 0]
|
| 532 |
+
tm.assert_extension_array_equal(res1, expected1)
|
| 533 |
+
|
| 534 |
+
res2 = dta._pad_or_backfill(method="backfill")
|
| 535 |
+
expected2 = dta.copy()
|
| 536 |
+
expected2 = dta.copy()
|
| 537 |
+
expected2[1, 0] = dta[2, 0]
|
| 538 |
+
expected2[0, 1] = dta[1, 1]
|
| 539 |
+
tm.assert_extension_array_equal(res2, expected2)
|
| 540 |
+
|
| 541 |
+
# with different ordering for underlying ndarray; behavior should
|
| 542 |
+
# be unchanged
|
| 543 |
+
dta2 = dta._from_backing_data(dta._ndarray.copy(order="F"))
|
| 544 |
+
assert dta2._ndarray.flags["F_CONTIGUOUS"]
|
| 545 |
+
assert not dta2._ndarray.flags["C_CONTIGUOUS"]
|
| 546 |
+
tm.assert_extension_array_equal(dta, dta2)
|
| 547 |
+
|
| 548 |
+
res3 = dta2._pad_or_backfill(method="pad")
|
| 549 |
+
tm.assert_extension_array_equal(res3, expected1)
|
| 550 |
+
|
| 551 |
+
res4 = dta2._pad_or_backfill(method="backfill")
|
| 552 |
+
tm.assert_extension_array_equal(res4, expected2)
|
| 553 |
+
|
| 554 |
+
# test the DataFrame method while we're here
|
| 555 |
+
df = pd.DataFrame(dta)
|
| 556 |
+
res = df.ffill()
|
| 557 |
+
expected = pd.DataFrame(expected1)
|
| 558 |
+
tm.assert_frame_equal(res, expected)
|
| 559 |
+
|
| 560 |
+
res = df.bfill()
|
| 561 |
+
expected = pd.DataFrame(expected2)
|
| 562 |
+
tm.assert_frame_equal(res, expected)
|
| 563 |
+
|
| 564 |
+
def test_array_interface_tz(self):
|
| 565 |
+
tz = "US/Central"
|
| 566 |
+
data = pd.date_range("2017", periods=2, tz=tz)._data
|
| 567 |
+
result = np.asarray(data)
|
| 568 |
+
|
| 569 |
+
expected = np.array(
|
| 570 |
+
[
|
| 571 |
+
pd.Timestamp("2017-01-01T00:00:00", tz=tz),
|
| 572 |
+
pd.Timestamp("2017-01-02T00:00:00", tz=tz),
|
| 573 |
+
],
|
| 574 |
+
dtype=object,
|
| 575 |
+
)
|
| 576 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 577 |
+
|
| 578 |
+
result = np.asarray(data, dtype=object)
|
| 579 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 580 |
+
|
| 581 |
+
result = np.asarray(data, dtype="M8[ns]")
|
| 582 |
+
|
| 583 |
+
expected = np.array(
|
| 584 |
+
["2017-01-01T06:00:00", "2017-01-02T06:00:00"], dtype="M8[ns]"
|
| 585 |
+
)
|
| 586 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 587 |
+
|
| 588 |
+
def test_array_interface(self):
|
| 589 |
+
data = pd.date_range("2017", periods=2)._data
|
| 590 |
+
expected = np.array(
|
| 591 |
+
["2017-01-01T00:00:00", "2017-01-02T00:00:00"], dtype="datetime64[ns]"
|
| 592 |
+
)
|
| 593 |
+
|
| 594 |
+
result = np.asarray(data)
|
| 595 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 596 |
+
|
| 597 |
+
result = np.asarray(data, dtype=object)
|
| 598 |
+
expected = np.array(
|
| 599 |
+
[pd.Timestamp("2017-01-01T00:00:00"), pd.Timestamp("2017-01-02T00:00:00")],
|
| 600 |
+
dtype=object,
|
| 601 |
+
)
|
| 602 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 603 |
+
|
| 604 |
+
@pytest.mark.parametrize("index", [True, False])
|
| 605 |
+
def test_searchsorted_different_tz(self, index):
|
| 606 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 607 |
+
arr = pd.DatetimeIndex(data, freq="D")._data.tz_localize("Asia/Tokyo")
|
| 608 |
+
if index:
|
| 609 |
+
arr = pd.Index(arr)
|
| 610 |
+
|
| 611 |
+
expected = arr.searchsorted(arr[2])
|
| 612 |
+
result = arr.searchsorted(arr[2].tz_convert("UTC"))
|
| 613 |
+
assert result == expected
|
| 614 |
+
|
| 615 |
+
expected = arr.searchsorted(arr[2:6])
|
| 616 |
+
result = arr.searchsorted(arr[2:6].tz_convert("UTC"))
|
| 617 |
+
tm.assert_equal(result, expected)
|
| 618 |
+
|
| 619 |
+
@pytest.mark.parametrize("index", [True, False])
|
| 620 |
+
def test_searchsorted_tzawareness_compat(self, index):
|
| 621 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 622 |
+
arr = pd.DatetimeIndex(data, freq="D")._data
|
| 623 |
+
if index:
|
| 624 |
+
arr = pd.Index(arr)
|
| 625 |
+
|
| 626 |
+
mismatch = arr.tz_localize("Asia/Tokyo")
|
| 627 |
+
|
| 628 |
+
msg = "Cannot compare tz-naive and tz-aware datetime-like objects"
|
| 629 |
+
with pytest.raises(TypeError, match=msg):
|
| 630 |
+
arr.searchsorted(mismatch[0])
|
| 631 |
+
with pytest.raises(TypeError, match=msg):
|
| 632 |
+
arr.searchsorted(mismatch)
|
| 633 |
+
|
| 634 |
+
with pytest.raises(TypeError, match=msg):
|
| 635 |
+
mismatch.searchsorted(arr[0])
|
| 636 |
+
with pytest.raises(TypeError, match=msg):
|
| 637 |
+
mismatch.searchsorted(arr)
|
| 638 |
+
|
| 639 |
+
@pytest.mark.parametrize(
|
| 640 |
+
"other",
|
| 641 |
+
[
|
| 642 |
+
1,
|
| 643 |
+
np.int64(1),
|
| 644 |
+
1.0,
|
| 645 |
+
np.timedelta64("NaT"),
|
| 646 |
+
pd.Timedelta(days=2),
|
| 647 |
+
"invalid",
|
| 648 |
+
np.arange(10, dtype="i8") * 24 * 3600 * 10**9,
|
| 649 |
+
np.arange(10).view("timedelta64[ns]") * 24 * 3600 * 10**9,
|
| 650 |
+
pd.Timestamp("2021-01-01").to_period("D"),
|
| 651 |
+
],
|
| 652 |
+
)
|
| 653 |
+
@pytest.mark.parametrize("index", [True, False])
|
| 654 |
+
def test_searchsorted_invalid_types(self, other, index):
|
| 655 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 656 |
+
arr = pd.DatetimeIndex(data, freq="D")._data
|
| 657 |
+
if index:
|
| 658 |
+
arr = pd.Index(arr)
|
| 659 |
+
|
| 660 |
+
msg = "|".join(
|
| 661 |
+
[
|
| 662 |
+
"searchsorted requires compatible dtype or scalar",
|
| 663 |
+
"value should be a 'Timestamp', 'NaT', or array of those. Got",
|
| 664 |
+
]
|
| 665 |
+
)
|
| 666 |
+
with pytest.raises(TypeError, match=msg):
|
| 667 |
+
arr.searchsorted(other)
|
| 668 |
+
|
| 669 |
+
def test_shift_fill_value(self):
|
| 670 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
| 671 |
+
|
| 672 |
+
dta = dti._data
|
| 673 |
+
expected = DatetimeArray._from_sequence(np.roll(dta._ndarray, 1))
|
| 674 |
+
|
| 675 |
+
fv = dta[-1]
|
| 676 |
+
for fill_value in [fv, fv.to_pydatetime(), fv.to_datetime64()]:
|
| 677 |
+
result = dta.shift(1, fill_value=fill_value)
|
| 678 |
+
tm.assert_datetime_array_equal(result, expected)
|
| 679 |
+
|
| 680 |
+
dta = dta.tz_localize("UTC")
|
| 681 |
+
expected = expected.tz_localize("UTC")
|
| 682 |
+
fv = dta[-1]
|
| 683 |
+
for fill_value in [fv, fv.to_pydatetime()]:
|
| 684 |
+
result = dta.shift(1, fill_value=fill_value)
|
| 685 |
+
tm.assert_datetime_array_equal(result, expected)
|
| 686 |
+
|
| 687 |
+
def test_shift_value_tzawareness_mismatch(self):
|
| 688 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
| 689 |
+
|
| 690 |
+
dta = dti._data
|
| 691 |
+
|
| 692 |
+
fv = dta[-1].tz_localize("UTC")
|
| 693 |
+
for invalid in [fv, fv.to_pydatetime()]:
|
| 694 |
+
with pytest.raises(TypeError, match="Cannot compare"):
|
| 695 |
+
dta.shift(1, fill_value=invalid)
|
| 696 |
+
|
| 697 |
+
dta = dta.tz_localize("UTC")
|
| 698 |
+
fv = dta[-1].tz_localize(None)
|
| 699 |
+
for invalid in [fv, fv.to_pydatetime(), fv.to_datetime64()]:
|
| 700 |
+
with pytest.raises(TypeError, match="Cannot compare"):
|
| 701 |
+
dta.shift(1, fill_value=invalid)
|
| 702 |
+
|
| 703 |
+
def test_shift_requires_tzmatch(self):
|
| 704 |
+
# pre-2.0 we required exact tz match, in 2.0 we require just
|
| 705 |
+
# matching tzawareness
|
| 706 |
+
dti = pd.date_range("2016-01-01", periods=3, tz="UTC")
|
| 707 |
+
dta = dti._data
|
| 708 |
+
|
| 709 |
+
fill_value = pd.Timestamp("2020-10-18 18:44", tz="US/Pacific")
|
| 710 |
+
|
| 711 |
+
result = dta.shift(1, fill_value=fill_value)
|
| 712 |
+
expected = dta.shift(1, fill_value=fill_value.tz_convert("UTC"))
|
| 713 |
+
tm.assert_equal(result, expected)
|
| 714 |
+
|
| 715 |
+
def test_tz_localize_t2d(self):
|
| 716 |
+
dti = pd.date_range("1994-05-12", periods=12, tz="US/Pacific")
|
| 717 |
+
dta = dti._data.reshape(3, 4)
|
| 718 |
+
result = dta.tz_localize(None)
|
| 719 |
+
|
| 720 |
+
expected = dta.ravel().tz_localize(None).reshape(dta.shape)
|
| 721 |
+
tm.assert_datetime_array_equal(result, expected)
|
| 722 |
+
|
| 723 |
+
roundtrip = expected.tz_localize("US/Pacific")
|
| 724 |
+
tm.assert_datetime_array_equal(roundtrip, dta)
|
| 725 |
+
|
| 726 |
+
easts = ["US/Eastern", "dateutil/US/Eastern"]
|
| 727 |
+
if ZoneInfo is not None:
|
| 728 |
+
try:
|
| 729 |
+
tz = ZoneInfo("US/Eastern")
|
| 730 |
+
except KeyError:
|
| 731 |
+
# no tzdata
|
| 732 |
+
pass
|
| 733 |
+
else:
|
| 734 |
+
# Argument 1 to "append" of "list" has incompatible type "ZoneInfo";
|
| 735 |
+
# expected "str"
|
| 736 |
+
easts.append(tz) # type: ignore[arg-type]
|
| 737 |
+
|
| 738 |
+
@pytest.mark.parametrize("tz", easts)
|
| 739 |
+
def test_iter_zoneinfo_fold(self, tz):
|
| 740 |
+
# GH#49684
|
| 741 |
+
utc_vals = np.array(
|
| 742 |
+
[1320552000, 1320555600, 1320559200, 1320562800], dtype=np.int64
|
| 743 |
+
)
|
| 744 |
+
utc_vals *= 1_000_000_000
|
| 745 |
+
|
| 746 |
+
dta = DatetimeArray._from_sequence(utc_vals).tz_localize("UTC").tz_convert(tz)
|
| 747 |
+
|
| 748 |
+
left = dta[2]
|
| 749 |
+
right = list(dta)[2]
|
| 750 |
+
assert str(left) == str(right)
|
| 751 |
+
# previously there was a bug where with non-pytz right would be
|
| 752 |
+
# Timestamp('2011-11-06 01:00:00-0400', tz='US/Eastern')
|
| 753 |
+
# while left would be
|
| 754 |
+
# Timestamp('2011-11-06 01:00:00-0500', tz='US/Eastern')
|
| 755 |
+
# The .value's would match (so they would compare as equal),
|
| 756 |
+
# but the folds would not
|
| 757 |
+
assert left.utcoffset() == right.utcoffset()
|
| 758 |
+
|
| 759 |
+
# The same bug in ints_to_pydatetime affected .astype, so we test
|
| 760 |
+
# that here.
|
| 761 |
+
right2 = dta.astype(object)[2]
|
| 762 |
+
assert str(left) == str(right2)
|
| 763 |
+
assert left.utcoffset() == right2.utcoffset()
|
| 764 |
+
|
| 765 |
+
@pytest.mark.parametrize(
|
| 766 |
+
"freq, freq_depr",
|
| 767 |
+
[
|
| 768 |
+
("2ME", "2M"),
|
| 769 |
+
("2SME", "2SM"),
|
| 770 |
+
("2SME", "2sm"),
|
| 771 |
+
("2QE", "2Q"),
|
| 772 |
+
("2QE-SEP", "2Q-SEP"),
|
| 773 |
+
("1YE", "1Y"),
|
| 774 |
+
("2YE-MAR", "2Y-MAR"),
|
| 775 |
+
("1YE", "1A"),
|
| 776 |
+
("2YE-MAR", "2A-MAR"),
|
| 777 |
+
("2ME", "2m"),
|
| 778 |
+
("2QE-SEP", "2q-sep"),
|
| 779 |
+
("2YE-MAR", "2a-mar"),
|
| 780 |
+
("2YE", "2y"),
|
| 781 |
+
],
|
| 782 |
+
)
|
| 783 |
+
def test_date_range_frequency_M_Q_Y_A_deprecated(self, freq, freq_depr):
|
| 784 |
+
# GH#9586, GH#54275
|
| 785 |
+
depr_msg = f"'{freq_depr[1:]}' is deprecated and will be removed "
|
| 786 |
+
f"in a future version, please use '{freq[1:]}' instead."
|
| 787 |
+
|
| 788 |
+
expected = pd.date_range("1/1/2000", periods=4, freq=freq)
|
| 789 |
+
with tm.assert_produces_warning(FutureWarning, match=depr_msg):
|
| 790 |
+
result = pd.date_range("1/1/2000", periods=4, freq=freq_depr)
|
| 791 |
+
tm.assert_index_equal(result, expected)
|
| 792 |
+
|
| 793 |
+
@pytest.mark.parametrize("freq_depr", ["2H", "2CBH", "2MIN", "2S", "2mS", "2Us"])
|
| 794 |
+
def test_date_range_uppercase_frequency_deprecated(self, freq_depr):
|
| 795 |
+
# GH#9586, GH#54939
|
| 796 |
+
depr_msg = f"'{freq_depr[1:]}' is deprecated and will be removed in a "
|
| 797 |
+
f"future version. Please use '{freq_depr.lower()[1:]}' instead."
|
| 798 |
+
|
| 799 |
+
expected = pd.date_range("1/1/2000", periods=4, freq=freq_depr.lower())
|
| 800 |
+
with tm.assert_produces_warning(FutureWarning, match=depr_msg):
|
| 801 |
+
result = pd.date_range("1/1/2000", periods=4, freq=freq_depr)
|
| 802 |
+
tm.assert_index_equal(result, expected)
|
| 803 |
+
|
| 804 |
+
@pytest.mark.parametrize(
|
| 805 |
+
"freq_depr",
|
| 806 |
+
[
|
| 807 |
+
"2ye-mar",
|
| 808 |
+
"2ys",
|
| 809 |
+
"2qe",
|
| 810 |
+
"2qs-feb",
|
| 811 |
+
"2bqs",
|
| 812 |
+
"2sms",
|
| 813 |
+
"2bms",
|
| 814 |
+
"2cbme",
|
| 815 |
+
"2me",
|
| 816 |
+
"2w",
|
| 817 |
+
],
|
| 818 |
+
)
|
| 819 |
+
def test_date_range_lowercase_frequency_deprecated(self, freq_depr):
|
| 820 |
+
# GH#9586, GH#54939
|
| 821 |
+
depr_msg = f"'{freq_depr[1:]}' is deprecated and will be removed in a "
|
| 822 |
+
f"future version, please use '{freq_depr.upper()[1:]}' instead."
|
| 823 |
+
|
| 824 |
+
expected = pd.date_range("1/1/2000", periods=4, freq=freq_depr.upper())
|
| 825 |
+
with tm.assert_produces_warning(FutureWarning, match=depr_msg):
|
| 826 |
+
result = pd.date_range("1/1/2000", periods=4, freq=freq_depr)
|
| 827 |
+
tm.assert_index_equal(result, expected)
|
| 828 |
+
|
| 829 |
+
|
| 830 |
+
def test_factorize_sort_without_freq():
|
| 831 |
+
dta = DatetimeArray._from_sequence([0, 2, 1], dtype="M8[ns]")
|
| 832 |
+
|
| 833 |
+
msg = r"call pd.factorize\(obj, sort=True\) instead"
|
| 834 |
+
with pytest.raises(NotImplementedError, match=msg):
|
| 835 |
+
dta.factorize(sort=True)
|
| 836 |
+
|
| 837 |
+
# Do TimedeltaArray while we're here
|
| 838 |
+
tda = dta - dta[0]
|
| 839 |
+
with pytest.raises(NotImplementedError, match=msg):
|
| 840 |
+
tda.factorize(sort=True)
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_ndarray_backed.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for subclasses of NDArrayBackedExtensionArray
|
| 3 |
+
"""
|
| 4 |
+
import numpy as np
|
| 5 |
+
|
| 6 |
+
from pandas import (
|
| 7 |
+
CategoricalIndex,
|
| 8 |
+
date_range,
|
| 9 |
+
)
|
| 10 |
+
from pandas.core.arrays import (
|
| 11 |
+
Categorical,
|
| 12 |
+
DatetimeArray,
|
| 13 |
+
NumpyExtensionArray,
|
| 14 |
+
TimedeltaArray,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class TestEmpty:
|
| 19 |
+
def test_empty_categorical(self):
|
| 20 |
+
ci = CategoricalIndex(["a", "b", "c"], ordered=True)
|
| 21 |
+
dtype = ci.dtype
|
| 22 |
+
|
| 23 |
+
# case with int8 codes
|
| 24 |
+
shape = (4,)
|
| 25 |
+
result = Categorical._empty(shape, dtype=dtype)
|
| 26 |
+
assert isinstance(result, Categorical)
|
| 27 |
+
assert result.shape == shape
|
| 28 |
+
assert result._ndarray.dtype == np.int8
|
| 29 |
+
|
| 30 |
+
# case where repr would segfault if we didn't override base implementation
|
| 31 |
+
result = Categorical._empty((4096,), dtype=dtype)
|
| 32 |
+
assert isinstance(result, Categorical)
|
| 33 |
+
assert result.shape == (4096,)
|
| 34 |
+
assert result._ndarray.dtype == np.int8
|
| 35 |
+
repr(result)
|
| 36 |
+
|
| 37 |
+
# case with int16 codes
|
| 38 |
+
ci = CategoricalIndex(list(range(512)) * 4, ordered=False)
|
| 39 |
+
dtype = ci.dtype
|
| 40 |
+
result = Categorical._empty(shape, dtype=dtype)
|
| 41 |
+
assert isinstance(result, Categorical)
|
| 42 |
+
assert result.shape == shape
|
| 43 |
+
assert result._ndarray.dtype == np.int16
|
| 44 |
+
|
| 45 |
+
def test_empty_dt64tz(self):
|
| 46 |
+
dti = date_range("2016-01-01", periods=2, tz="Asia/Tokyo")
|
| 47 |
+
dtype = dti.dtype
|
| 48 |
+
|
| 49 |
+
shape = (0,)
|
| 50 |
+
result = DatetimeArray._empty(shape, dtype=dtype)
|
| 51 |
+
assert result.dtype == dtype
|
| 52 |
+
assert isinstance(result, DatetimeArray)
|
| 53 |
+
assert result.shape == shape
|
| 54 |
+
|
| 55 |
+
def test_empty_dt64(self):
|
| 56 |
+
shape = (3, 9)
|
| 57 |
+
result = DatetimeArray._empty(shape, dtype="datetime64[ns]")
|
| 58 |
+
assert isinstance(result, DatetimeArray)
|
| 59 |
+
assert result.shape == shape
|
| 60 |
+
|
| 61 |
+
def test_empty_td64(self):
|
| 62 |
+
shape = (3, 9)
|
| 63 |
+
result = TimedeltaArray._empty(shape, dtype="m8[ns]")
|
| 64 |
+
assert isinstance(result, TimedeltaArray)
|
| 65 |
+
assert result.shape == shape
|
| 66 |
+
|
| 67 |
+
def test_empty_pandas_array(self):
|
| 68 |
+
arr = NumpyExtensionArray(np.array([1, 2]))
|
| 69 |
+
dtype = arr.dtype
|
| 70 |
+
|
| 71 |
+
shape = (3, 9)
|
| 72 |
+
result = NumpyExtensionArray._empty(shape, dtype=dtype)
|
| 73 |
+
assert isinstance(result, NumpyExtensionArray)
|
| 74 |
+
assert result.dtype == dtype
|
| 75 |
+
assert result.shape == shape
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_period.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
from pandas._libs.tslibs import iNaT
|
| 5 |
+
from pandas._libs.tslibs.period import IncompatibleFrequency
|
| 6 |
+
|
| 7 |
+
from pandas.core.dtypes.base import _registry as registry
|
| 8 |
+
from pandas.core.dtypes.dtypes import PeriodDtype
|
| 9 |
+
|
| 10 |
+
import pandas as pd
|
| 11 |
+
import pandas._testing as tm
|
| 12 |
+
from pandas.core.arrays import PeriodArray
|
| 13 |
+
|
| 14 |
+
# ----------------------------------------------------------------------------
|
| 15 |
+
# Dtype
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def test_registered():
    """PeriodDtype is registered and discoverable via the dtype registry."""
    assert PeriodDtype in registry.dtypes
    found = registry.find("Period[D]")
    assert found == PeriodDtype("D")
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# ----------------------------------------------------------------------------
|
| 26 |
+
# period_array
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def test_asi8():
|
| 30 |
+
result = PeriodArray._from_sequence(["2000", "2001", None], dtype="period[D]").asi8
|
| 31 |
+
expected = np.array([10957, 11323, iNaT])
|
| 32 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def test_take_raises():
|
| 36 |
+
arr = PeriodArray._from_sequence(["2000", "2001"], dtype="period[D]")
|
| 37 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
| 38 |
+
arr.take([0, -1], allow_fill=True, fill_value=pd.Period("2000", freq="W"))
|
| 39 |
+
|
| 40 |
+
msg = "value should be a 'Period' or 'NaT'. Got 'str' instead"
|
| 41 |
+
with pytest.raises(TypeError, match=msg):
|
| 42 |
+
arr.take([0, -1], allow_fill=True, fill_value="foo")
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def test_fillna_raises():
|
| 46 |
+
arr = PeriodArray._from_sequence(["2000", "2001", "2002"], dtype="period[D]")
|
| 47 |
+
with pytest.raises(ValueError, match="Length"):
|
| 48 |
+
arr.fillna(arr[:2])
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def test_fillna_copies():
|
| 52 |
+
arr = PeriodArray._from_sequence(["2000", "2001", "2002"], dtype="period[D]")
|
| 53 |
+
result = arr.fillna(pd.Period("2000", "D"))
|
| 54 |
+
assert result is not arr
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# ----------------------------------------------------------------------------
|
| 58 |
+
# setitem
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@pytest.mark.parametrize(
    "key, value, expected",
    [
        ([0], pd.Period("2000", "D"), [10957, 1, 2]),
        ([0], None, [iNaT, 1, 2]),
        ([0], np.nan, [iNaT, 1, 2]),
        ([0, 1, 2], pd.Period("2000", "D"), [10957] * 3),
        (
            [0, 1, 2],
            [pd.Period("2000", "D"), pd.Period("2001", "D"), pd.Period("2002", "D")],
            [10957, 11323, 11688],
        ),
    ],
)
def test_setitem(key, value, expected):
    """__setitem__ accepts Periods, None/NaN (-> NaT), and Period listlikes."""
    parr = PeriodArray(np.arange(3), dtype="period[D]")
    parr[key] = value
    tm.assert_period_array_equal(parr, PeriodArray(expected, dtype="period[D]"))
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def test_setitem_raises_incompatible_freq():
|
| 83 |
+
arr = PeriodArray(np.arange(3), dtype="period[D]")
|
| 84 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
| 85 |
+
arr[0] = pd.Period("2000", freq="Y")
|
| 86 |
+
|
| 87 |
+
other = PeriodArray._from_sequence(["2000", "2001"], dtype="period[Y]")
|
| 88 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
| 89 |
+
arr[[0, 1]] = other
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def test_setitem_raises_length():
    """Assigning a listlike of mismatched length raises ValueError."""
    parr = PeriodArray(np.arange(3), dtype="period[D]")
    with pytest.raises(ValueError, match="length"):
        parr[[0, 1]] = [pd.Period("2000", freq="D")]
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def test_setitem_raises_type():
    """Assigning a plain int (not a Period) raises TypeError."""
    parr = PeriodArray(np.arange(3), dtype="period[D]")
    with pytest.raises(TypeError, match="int"):
        parr[0] = 1
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
# ----------------------------------------------------------------------------
|
| 105 |
+
# Ops
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def test_sub_period():
|
| 109 |
+
arr = PeriodArray._from_sequence(["2000", "2001"], dtype="period[D]")
|
| 110 |
+
other = pd.Period("2000", freq="M")
|
| 111 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
| 112 |
+
arr - other
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def test_sub_period_overflow():
    # GH#47538: int64 overflow in Period subtraction must raise, not wrap
    pi = pd.date_range("1677-09-22", periods=2, freq="D").to_period("ns")
    far_period = pd.Period._from_ordinal(10**14, pi.freq)

    # overflow must be detected in both subtraction orders
    for left, right in ((pi, far_period), (far_period, pi)):
        with pytest.raises(OverflowError, match="Overflow in int64 addition"):
            left - right
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
# ----------------------------------------------------------------------------
|
| 130 |
+
# Methods
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
@pytest.mark.parametrize(
|
| 134 |
+
"other",
|
| 135 |
+
[
|
| 136 |
+
pd.Period("2000", freq="h"),
|
| 137 |
+
PeriodArray._from_sequence(["2000", "2001", "2000"], dtype="period[h]"),
|
| 138 |
+
],
|
| 139 |
+
)
|
| 140 |
+
def test_where_different_freq_raises(other):
|
| 141 |
+
# GH#45768 The PeriodArray method raises, the Series method coerces
|
| 142 |
+
ser = pd.Series(
|
| 143 |
+
PeriodArray._from_sequence(["2000", "2001", "2002"], dtype="period[D]")
|
| 144 |
+
)
|
| 145 |
+
cond = np.array([True, False, True])
|
| 146 |
+
|
| 147 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
| 148 |
+
ser.array._where(cond, other)
|
| 149 |
+
|
| 150 |
+
res = ser.where(cond, other)
|
| 151 |
+
expected = ser.astype(object).where(cond, other)
|
| 152 |
+
tm.assert_series_equal(res, expected)
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
# ----------------------------------------------------------------------------
|
| 156 |
+
# Printing
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def test_repr_small():
|
| 160 |
+
arr = PeriodArray._from_sequence(["2000", "2001"], dtype="period[D]")
|
| 161 |
+
result = str(arr)
|
| 162 |
+
expected = (
|
| 163 |
+
"<PeriodArray>\n['2000-01-01', '2001-01-01']\nLength: 2, dtype: period[D]"
|
| 164 |
+
)
|
| 165 |
+
assert result == expected
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def test_repr_large():
|
| 169 |
+
arr = PeriodArray._from_sequence(["2000", "2001"] * 500, dtype="period[D]")
|
| 170 |
+
result = str(arr)
|
| 171 |
+
expected = (
|
| 172 |
+
"<PeriodArray>\n"
|
| 173 |
+
"['2000-01-01', '2001-01-01', '2000-01-01', '2001-01-01', "
|
| 174 |
+
"'2000-01-01',\n"
|
| 175 |
+
" '2001-01-01', '2000-01-01', '2001-01-01', '2000-01-01', "
|
| 176 |
+
"'2001-01-01',\n"
|
| 177 |
+
" ...\n"
|
| 178 |
+
" '2000-01-01', '2001-01-01', '2000-01-01', '2001-01-01', "
|
| 179 |
+
"'2000-01-01',\n"
|
| 180 |
+
" '2001-01-01', '2000-01-01', '2001-01-01', '2000-01-01', "
|
| 181 |
+
"'2001-01-01']\n"
|
| 182 |
+
"Length: 1000, dtype: period[D]"
|
| 183 |
+
)
|
| 184 |
+
assert result == expected
|
vllm/lib/python3.10/site-packages/pandas/tests/arrays/test_timedeltas.py
ADDED
|
@@ -0,0 +1,313 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import timedelta
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
import pandas as pd
|
| 7 |
+
from pandas import Timedelta
|
| 8 |
+
import pandas._testing as tm
|
| 9 |
+
from pandas.core.arrays import (
|
| 10 |
+
DatetimeArray,
|
| 11 |
+
TimedeltaArray,
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestNonNano:
|
| 16 |
+
@pytest.fixture(params=["s", "ms", "us"])
|
| 17 |
+
def unit(self, request):
|
| 18 |
+
return request.param
|
| 19 |
+
|
| 20 |
+
@pytest.fixture
|
| 21 |
+
def tda(self, unit):
|
| 22 |
+
arr = np.arange(5, dtype=np.int64).view(f"m8[{unit}]")
|
| 23 |
+
return TimedeltaArray._simple_new(arr, dtype=arr.dtype)
|
| 24 |
+
|
| 25 |
+
def test_non_nano(self, unit):
|
| 26 |
+
arr = np.arange(5, dtype=np.int64).view(f"m8[{unit}]")
|
| 27 |
+
tda = TimedeltaArray._simple_new(arr, dtype=arr.dtype)
|
| 28 |
+
|
| 29 |
+
assert tda.dtype == arr.dtype
|
| 30 |
+
assert tda[0].unit == unit
|
| 31 |
+
|
| 32 |
+
def test_as_unit_raises(self, tda):
|
| 33 |
+
# GH#50616
|
| 34 |
+
with pytest.raises(ValueError, match="Supported units"):
|
| 35 |
+
tda.as_unit("D")
|
| 36 |
+
|
| 37 |
+
tdi = pd.Index(tda)
|
| 38 |
+
with pytest.raises(ValueError, match="Supported units"):
|
| 39 |
+
tdi.as_unit("D")
|
| 40 |
+
|
| 41 |
+
@pytest.mark.parametrize("field", TimedeltaArray._field_ops)
|
| 42 |
+
def test_fields(self, tda, field):
|
| 43 |
+
as_nano = tda._ndarray.astype("m8[ns]")
|
| 44 |
+
tda_nano = TimedeltaArray._simple_new(as_nano, dtype=as_nano.dtype)
|
| 45 |
+
|
| 46 |
+
result = getattr(tda, field)
|
| 47 |
+
expected = getattr(tda_nano, field)
|
| 48 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 49 |
+
|
| 50 |
+
def test_to_pytimedelta(self, tda):
|
| 51 |
+
as_nano = tda._ndarray.astype("m8[ns]")
|
| 52 |
+
tda_nano = TimedeltaArray._simple_new(as_nano, dtype=as_nano.dtype)
|
| 53 |
+
|
| 54 |
+
result = tda.to_pytimedelta()
|
| 55 |
+
expected = tda_nano.to_pytimedelta()
|
| 56 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 57 |
+
|
| 58 |
+
def test_total_seconds(self, unit, tda):
|
| 59 |
+
as_nano = tda._ndarray.astype("m8[ns]")
|
| 60 |
+
tda_nano = TimedeltaArray._simple_new(as_nano, dtype=as_nano.dtype)
|
| 61 |
+
|
| 62 |
+
result = tda.total_seconds()
|
| 63 |
+
expected = tda_nano.total_seconds()
|
| 64 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 65 |
+
|
| 66 |
+
def test_timedelta_array_total_seconds(self):
|
| 67 |
+
# GH34290
|
| 68 |
+
expected = Timedelta("2 min").total_seconds()
|
| 69 |
+
|
| 70 |
+
result = pd.array([Timedelta("2 min")]).total_seconds()[0]
|
| 71 |
+
assert result == expected
|
| 72 |
+
|
| 73 |
+
def test_total_seconds_nanoseconds(self):
|
| 74 |
+
# issue #48521
|
| 75 |
+
start_time = pd.Series(["2145-11-02 06:00:00"]).astype("datetime64[ns]")
|
| 76 |
+
end_time = pd.Series(["2145-11-02 07:06:00"]).astype("datetime64[ns]")
|
| 77 |
+
expected = (end_time - start_time).values / np.timedelta64(1, "s")
|
| 78 |
+
result = (end_time - start_time).dt.total_seconds().values
|
| 79 |
+
assert result == expected
|
| 80 |
+
|
| 81 |
+
@pytest.mark.parametrize(
|
| 82 |
+
"nat", [np.datetime64("NaT", "ns"), np.datetime64("NaT", "us")]
|
| 83 |
+
)
|
| 84 |
+
def test_add_nat_datetimelike_scalar(self, nat, tda):
|
| 85 |
+
result = tda + nat
|
| 86 |
+
assert isinstance(result, DatetimeArray)
|
| 87 |
+
assert result._creso == tda._creso
|
| 88 |
+
assert result.isna().all()
|
| 89 |
+
|
| 90 |
+
result = nat + tda
|
| 91 |
+
assert isinstance(result, DatetimeArray)
|
| 92 |
+
assert result._creso == tda._creso
|
| 93 |
+
assert result.isna().all()
|
| 94 |
+
|
| 95 |
+
def test_add_pdnat(self, tda):
|
| 96 |
+
result = tda + pd.NaT
|
| 97 |
+
assert isinstance(result, TimedeltaArray)
|
| 98 |
+
assert result._creso == tda._creso
|
| 99 |
+
assert result.isna().all()
|
| 100 |
+
|
| 101 |
+
result = pd.NaT + tda
|
| 102 |
+
assert isinstance(result, TimedeltaArray)
|
| 103 |
+
assert result._creso == tda._creso
|
| 104 |
+
assert result.isna().all()
|
| 105 |
+
|
| 106 |
+
# TODO: 2022-07-11 this is the only test that gets to DTA.tz_convert
|
| 107 |
+
# or tz_localize with non-nano; implement tests specific to that.
|
| 108 |
+
def test_add_datetimelike_scalar(self, tda, tz_naive_fixture):
|
| 109 |
+
ts = pd.Timestamp("2016-01-01", tz=tz_naive_fixture).as_unit("ns")
|
| 110 |
+
|
| 111 |
+
expected = tda.as_unit("ns") + ts
|
| 112 |
+
res = tda + ts
|
| 113 |
+
tm.assert_extension_array_equal(res, expected)
|
| 114 |
+
res = ts + tda
|
| 115 |
+
tm.assert_extension_array_equal(res, expected)
|
| 116 |
+
|
| 117 |
+
ts += Timedelta(1) # case where we can't cast losslessly
|
| 118 |
+
|
| 119 |
+
exp_values = tda._ndarray + ts.asm8
|
| 120 |
+
expected = (
|
| 121 |
+
DatetimeArray._simple_new(exp_values, dtype=exp_values.dtype)
|
| 122 |
+
.tz_localize("UTC")
|
| 123 |
+
.tz_convert(ts.tz)
|
| 124 |
+
)
|
| 125 |
+
|
| 126 |
+
result = tda + ts
|
| 127 |
+
tm.assert_extension_array_equal(result, expected)
|
| 128 |
+
|
| 129 |
+
result = ts + tda
|
| 130 |
+
tm.assert_extension_array_equal(result, expected)
|
| 131 |
+
|
| 132 |
+
def test_mul_scalar(self, tda):
|
| 133 |
+
other = 2
|
| 134 |
+
result = tda * other
|
| 135 |
+
expected = TimedeltaArray._simple_new(tda._ndarray * other, dtype=tda.dtype)
|
| 136 |
+
tm.assert_extension_array_equal(result, expected)
|
| 137 |
+
assert result._creso == tda._creso
|
| 138 |
+
|
| 139 |
+
def test_mul_listlike(self, tda):
|
| 140 |
+
other = np.arange(len(tda))
|
| 141 |
+
result = tda * other
|
| 142 |
+
expected = TimedeltaArray._simple_new(tda._ndarray * other, dtype=tda.dtype)
|
| 143 |
+
tm.assert_extension_array_equal(result, expected)
|
| 144 |
+
assert result._creso == tda._creso
|
| 145 |
+
|
| 146 |
+
def test_mul_listlike_object(self, tda):
|
| 147 |
+
other = np.arange(len(tda))
|
| 148 |
+
result = tda * other.astype(object)
|
| 149 |
+
expected = TimedeltaArray._simple_new(tda._ndarray * other, dtype=tda.dtype)
|
| 150 |
+
tm.assert_extension_array_equal(result, expected)
|
| 151 |
+
assert result._creso == tda._creso
|
| 152 |
+
|
| 153 |
+
def test_div_numeric_scalar(self, tda):
|
| 154 |
+
other = 2
|
| 155 |
+
result = tda / other
|
| 156 |
+
expected = TimedeltaArray._simple_new(tda._ndarray / other, dtype=tda.dtype)
|
| 157 |
+
tm.assert_extension_array_equal(result, expected)
|
| 158 |
+
assert result._creso == tda._creso
|
| 159 |
+
|
| 160 |
+
def test_div_td_scalar(self, tda):
|
| 161 |
+
other = timedelta(seconds=1)
|
| 162 |
+
result = tda / other
|
| 163 |
+
expected = tda._ndarray / np.timedelta64(1, "s")
|
| 164 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 165 |
+
|
| 166 |
+
def test_div_numeric_array(self, tda):
|
| 167 |
+
other = np.arange(len(tda))
|
| 168 |
+
result = tda / other
|
| 169 |
+
expected = TimedeltaArray._simple_new(tda._ndarray / other, dtype=tda.dtype)
|
| 170 |
+
tm.assert_extension_array_equal(result, expected)
|
| 171 |
+
assert result._creso == tda._creso
|
| 172 |
+
|
| 173 |
+
def test_div_td_array(self, tda):
|
| 174 |
+
other = tda._ndarray + tda._ndarray[-1]
|
| 175 |
+
result = tda / other
|
| 176 |
+
expected = tda._ndarray / other
|
| 177 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 178 |
+
|
| 179 |
+
def test_add_timedeltaarraylike(self, tda):
|
| 180 |
+
tda_nano = tda.astype("m8[ns]")
|
| 181 |
+
|
| 182 |
+
expected = tda_nano * 2
|
| 183 |
+
res = tda_nano + tda
|
| 184 |
+
tm.assert_extension_array_equal(res, expected)
|
| 185 |
+
res = tda + tda_nano
|
| 186 |
+
tm.assert_extension_array_equal(res, expected)
|
| 187 |
+
|
| 188 |
+
expected = tda_nano * 0
|
| 189 |
+
res = tda - tda_nano
|
| 190 |
+
tm.assert_extension_array_equal(res, expected)
|
| 191 |
+
|
| 192 |
+
res = tda_nano - tda
|
| 193 |
+
tm.assert_extension_array_equal(res, expected)
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
class TestTimedeltaArray:
|
| 197 |
+
@pytest.mark.parametrize("dtype", [int, np.int32, np.int64, "uint32", "uint64"])
|
| 198 |
+
def test_astype_int(self, dtype):
|
| 199 |
+
arr = TimedeltaArray._from_sequence(
|
| 200 |
+
[Timedelta("1h"), Timedelta("2h")], dtype="m8[ns]"
|
| 201 |
+
)
|
| 202 |
+
|
| 203 |
+
if np.dtype(dtype) != np.int64:
|
| 204 |
+
with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"):
|
| 205 |
+
arr.astype(dtype)
|
| 206 |
+
return
|
| 207 |
+
|
| 208 |
+
result = arr.astype(dtype)
|
| 209 |
+
expected = arr._ndarray.view("i8")
|
| 210 |
+
tm.assert_numpy_array_equal(result, expected)
|
| 211 |
+
|
| 212 |
+
def test_setitem_clears_freq(self):
|
| 213 |
+
a = pd.timedelta_range("1h", periods=2, freq="h")._data
|
| 214 |
+
a[0] = Timedelta("1h")
|
| 215 |
+
assert a.freq is None
|
| 216 |
+
|
| 217 |
+
@pytest.mark.parametrize(
|
| 218 |
+
"obj",
|
| 219 |
+
[
|
| 220 |
+
Timedelta(seconds=1),
|
| 221 |
+
Timedelta(seconds=1).to_timedelta64(),
|
| 222 |
+
Timedelta(seconds=1).to_pytimedelta(),
|
| 223 |
+
],
|
| 224 |
+
)
|
| 225 |
+
def test_setitem_objects(self, obj):
|
| 226 |
+
# make sure we accept timedelta64 and timedelta in addition to Timedelta
|
| 227 |
+
tdi = pd.timedelta_range("2 Days", periods=4, freq="h")
|
| 228 |
+
arr = tdi._data
|
| 229 |
+
|
| 230 |
+
arr[0] = obj
|
| 231 |
+
assert arr[0] == Timedelta(seconds=1)
|
| 232 |
+
|
| 233 |
+
@pytest.mark.parametrize(
|
| 234 |
+
"other",
|
| 235 |
+
[
|
| 236 |
+
1,
|
| 237 |
+
np.int64(1),
|
| 238 |
+
1.0,
|
| 239 |
+
np.datetime64("NaT"),
|
| 240 |
+
pd.Timestamp("2021-01-01"),
|
| 241 |
+
"invalid",
|
| 242 |
+
np.arange(10, dtype="i8") * 24 * 3600 * 10**9,
|
| 243 |
+
(np.arange(10) * 24 * 3600 * 10**9).view("datetime64[ns]"),
|
| 244 |
+
pd.Timestamp("2021-01-01").to_period("D"),
|
| 245 |
+
],
|
| 246 |
+
)
|
| 247 |
+
@pytest.mark.parametrize("index", [True, False])
|
| 248 |
+
def test_searchsorted_invalid_types(self, other, index):
|
| 249 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
| 250 |
+
arr = pd.TimedeltaIndex(data, freq="D")._data
|
| 251 |
+
if index:
|
| 252 |
+
arr = pd.Index(arr)
|
| 253 |
+
|
| 254 |
+
msg = "|".join(
|
| 255 |
+
[
|
| 256 |
+
"searchsorted requires compatible dtype or scalar",
|
| 257 |
+
"value should be a 'Timedelta', 'NaT', or array of those. Got",
|
| 258 |
+
]
|
| 259 |
+
)
|
| 260 |
+
with pytest.raises(TypeError, match=msg):
|
| 261 |
+
arr.searchsorted(other)
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
class TestUnaryOps:
|
| 265 |
+
def test_abs(self):
|
| 266 |
+
vals = np.array([-3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
|
| 267 |
+
arr = TimedeltaArray._from_sequence(vals)
|
| 268 |
+
|
| 269 |
+
evals = np.array([3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
|
| 270 |
+
expected = TimedeltaArray._from_sequence(evals)
|
| 271 |
+
|
| 272 |
+
result = abs(arr)
|
| 273 |
+
tm.assert_timedelta_array_equal(result, expected)
|
| 274 |
+
|
| 275 |
+
result2 = np.abs(arr)
|
| 276 |
+
tm.assert_timedelta_array_equal(result2, expected)
|
| 277 |
+
|
| 278 |
+
def test_pos(self):
|
| 279 |
+
vals = np.array([-3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
|
| 280 |
+
arr = TimedeltaArray._from_sequence(vals)
|
| 281 |
+
|
| 282 |
+
result = +arr
|
| 283 |
+
tm.assert_timedelta_array_equal(result, arr)
|
| 284 |
+
assert not tm.shares_memory(result, arr)
|
| 285 |
+
|
| 286 |
+
result2 = np.positive(arr)
|
| 287 |
+
tm.assert_timedelta_array_equal(result2, arr)
|
| 288 |
+
assert not tm.shares_memory(result2, arr)
|
| 289 |
+
|
| 290 |
+
def test_neg(self):
|
| 291 |
+
vals = np.array([-3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
|
| 292 |
+
arr = TimedeltaArray._from_sequence(vals)
|
| 293 |
+
|
| 294 |
+
evals = np.array([3600 * 10**9, "NaT", -7200 * 10**9], dtype="m8[ns]")
|
| 295 |
+
expected = TimedeltaArray._from_sequence(evals)
|
| 296 |
+
|
| 297 |
+
result = -arr
|
| 298 |
+
tm.assert_timedelta_array_equal(result, expected)
|
| 299 |
+
|
| 300 |
+
result2 = np.negative(arr)
|
| 301 |
+
tm.assert_timedelta_array_equal(result2, expected)
|
| 302 |
+
|
| 303 |
+
def test_neg_freq(self):
|
| 304 |
+
tdi = pd.timedelta_range("2 Days", periods=4, freq="h")
|
| 305 |
+
arr = tdi._data
|
| 306 |
+
|
| 307 |
+
expected = -tdi._data
|
| 308 |
+
|
| 309 |
+
result = -arr
|
| 310 |
+
tm.assert_timedelta_array_equal(result, expected)
|
| 311 |
+
|
| 312 |
+
result2 = np.negative(arr)
|
| 313 |
+
tm.assert_timedelta_array_equal(result2, expected)
|
vllm/lib/python3.10/site-packages/pandas/tests/config/__init__.py
ADDED
|
File without changes
|
vllm/lib/python3.10/site-packages/pandas/tests/config/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (169 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/config/__pycache__/test_config.cpython-310.pyc
ADDED
|
Binary file (12.9 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/config/__pycache__/test_localization.cpython-310.pyc
ADDED
|
Binary file (3.69 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/config/test_config.py
ADDED
|
@@ -0,0 +1,437 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from pandas._config import config as cf
|
| 4 |
+
from pandas._config.config import OptionError
|
| 5 |
+
|
| 6 |
+
import pandas as pd
|
| 7 |
+
import pandas._testing as tm
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestConfig:
|
| 11 |
+
@pytest.fixture(autouse=True)
|
| 12 |
+
def clean_config(self, monkeypatch):
|
| 13 |
+
with monkeypatch.context() as m:
|
| 14 |
+
m.setattr(cf, "_global_config", {})
|
| 15 |
+
m.setattr(cf, "options", cf.DictWrapper(cf._global_config))
|
| 16 |
+
m.setattr(cf, "_deprecated_options", {})
|
| 17 |
+
m.setattr(cf, "_registered_options", {})
|
| 18 |
+
|
| 19 |
+
# Our test fixture in conftest.py sets "chained_assignment"
|
| 20 |
+
# to "raise" only after all test methods have been setup.
|
| 21 |
+
# However, after this setup, there is no longer any
|
| 22 |
+
# "chained_assignment" option, so re-register it.
|
| 23 |
+
cf.register_option("chained_assignment", "raise")
|
| 24 |
+
yield
|
| 25 |
+
|
| 26 |
+
def test_api(self):
|
| 27 |
+
# the pandas object exposes the user API
|
| 28 |
+
assert hasattr(pd, "get_option")
|
| 29 |
+
assert hasattr(pd, "set_option")
|
| 30 |
+
assert hasattr(pd, "reset_option")
|
| 31 |
+
assert hasattr(pd, "describe_option")
|
| 32 |
+
|
| 33 |
+
def test_is_one_of_factory(self):
|
| 34 |
+
v = cf.is_one_of_factory([None, 12])
|
| 35 |
+
|
| 36 |
+
v(12)
|
| 37 |
+
v(None)
|
| 38 |
+
msg = r"Value must be one of None\|12"
|
| 39 |
+
with pytest.raises(ValueError, match=msg):
|
| 40 |
+
v(1.1)
|
| 41 |
+
|
| 42 |
+
def test_register_option(self):
|
| 43 |
+
cf.register_option("a", 1, "doc")
|
| 44 |
+
|
| 45 |
+
# can't register an already registered option
|
| 46 |
+
msg = "Option 'a' has already been registered"
|
| 47 |
+
with pytest.raises(OptionError, match=msg):
|
| 48 |
+
cf.register_option("a", 1, "doc")
|
| 49 |
+
|
| 50 |
+
# can't register an already registered option
|
| 51 |
+
msg = "Path prefix to option 'a' is already an option"
|
| 52 |
+
with pytest.raises(OptionError, match=msg):
|
| 53 |
+
cf.register_option("a.b.c.d1", 1, "doc")
|
| 54 |
+
with pytest.raises(OptionError, match=msg):
|
| 55 |
+
cf.register_option("a.b.c.d2", 1, "doc")
|
| 56 |
+
|
| 57 |
+
# no python keywords
|
| 58 |
+
msg = "for is a python keyword"
|
| 59 |
+
with pytest.raises(ValueError, match=msg):
|
| 60 |
+
cf.register_option("for", 0)
|
| 61 |
+
with pytest.raises(ValueError, match=msg):
|
| 62 |
+
cf.register_option("a.for.b", 0)
|
| 63 |
+
# must be valid identifier (ensure attribute access works)
|
| 64 |
+
msg = "oh my goddess! is not a valid identifier"
|
| 65 |
+
with pytest.raises(ValueError, match=msg):
|
| 66 |
+
cf.register_option("Oh my Goddess!", 0)
|
| 67 |
+
|
| 68 |
+
# we can register options several levels deep
|
| 69 |
+
# without predefining the intermediate steps
|
| 70 |
+
# and we can define differently named options
|
| 71 |
+
# in the same namespace
|
| 72 |
+
cf.register_option("k.b.c.d1", 1, "doc")
|
| 73 |
+
cf.register_option("k.b.c.d2", 1, "doc")
|
| 74 |
+
|
| 75 |
+
def test_describe_option(self):
|
| 76 |
+
cf.register_option("a", 1, "doc")
|
| 77 |
+
cf.register_option("b", 1, "doc2")
|
| 78 |
+
cf.deprecate_option("b")
|
| 79 |
+
|
| 80 |
+
cf.register_option("c.d.e1", 1, "doc3")
|
| 81 |
+
cf.register_option("c.d.e2", 1, "doc4")
|
| 82 |
+
cf.register_option("f", 1)
|
| 83 |
+
cf.register_option("g.h", 1)
|
| 84 |
+
cf.register_option("k", 2)
|
| 85 |
+
cf.deprecate_option("g.h", rkey="k")
|
| 86 |
+
cf.register_option("l", "foo")
|
| 87 |
+
|
| 88 |
+
# non-existent keys raise KeyError
|
| 89 |
+
msg = r"No such keys\(s\)"
|
| 90 |
+
with pytest.raises(OptionError, match=msg):
|
| 91 |
+
cf.describe_option("no.such.key")
|
| 92 |
+
|
| 93 |
+
# we can get the description for any key we registered
|
| 94 |
+
assert "doc" in cf.describe_option("a", _print_desc=False)
|
| 95 |
+
assert "doc2" in cf.describe_option("b", _print_desc=False)
|
| 96 |
+
assert "precated" in cf.describe_option("b", _print_desc=False)
|
| 97 |
+
assert "doc3" in cf.describe_option("c.d.e1", _print_desc=False)
|
| 98 |
+
assert "doc4" in cf.describe_option("c.d.e2", _print_desc=False)
|
| 99 |
+
|
| 100 |
+
# if no doc is specified we get a default message
|
| 101 |
+
# saying "description not available"
|
| 102 |
+
assert "available" in cf.describe_option("f", _print_desc=False)
|
| 103 |
+
assert "available" in cf.describe_option("g.h", _print_desc=False)
|
| 104 |
+
assert "precated" in cf.describe_option("g.h", _print_desc=False)
|
| 105 |
+
assert "k" in cf.describe_option("g.h", _print_desc=False)
|
| 106 |
+
|
| 107 |
+
# default is reported
|
| 108 |
+
assert "foo" in cf.describe_option("l", _print_desc=False)
|
| 109 |
+
# current value is reported
|
| 110 |
+
assert "bar" not in cf.describe_option("l", _print_desc=False)
|
| 111 |
+
cf.set_option("l", "bar")
|
| 112 |
+
assert "bar" in cf.describe_option("l", _print_desc=False)
|
| 113 |
+
|
| 114 |
+
def test_case_insensitive(self):
|
| 115 |
+
cf.register_option("KanBAN", 1, "doc")
|
| 116 |
+
|
| 117 |
+
assert "doc" in cf.describe_option("kanbaN", _print_desc=False)
|
| 118 |
+
assert cf.get_option("kanBaN") == 1
|
| 119 |
+
cf.set_option("KanBan", 2)
|
| 120 |
+
assert cf.get_option("kAnBaN") == 2
|
| 121 |
+
|
| 122 |
+
# gets of non-existent keys fail
|
| 123 |
+
msg = r"No such keys\(s\): 'no_such_option'"
|
| 124 |
+
with pytest.raises(OptionError, match=msg):
|
| 125 |
+
cf.get_option("no_such_option")
|
| 126 |
+
cf.deprecate_option("KanBan")
|
| 127 |
+
|
| 128 |
+
assert cf._is_deprecated("kAnBaN")
|
| 129 |
+
|
| 130 |
+
def test_get_option(self):
|
| 131 |
+
cf.register_option("a", 1, "doc")
|
| 132 |
+
cf.register_option("b.c", "hullo", "doc2")
|
| 133 |
+
cf.register_option("b.b", None, "doc2")
|
| 134 |
+
|
| 135 |
+
# gets of existing keys succeed
|
| 136 |
+
assert cf.get_option("a") == 1
|
| 137 |
+
assert cf.get_option("b.c") == "hullo"
|
| 138 |
+
assert cf.get_option("b.b") is None
|
| 139 |
+
|
| 140 |
+
# gets of non-existent keys fail
|
| 141 |
+
msg = r"No such keys\(s\): 'no_such_option'"
|
| 142 |
+
with pytest.raises(OptionError, match=msg):
|
| 143 |
+
cf.get_option("no_such_option")
|
| 144 |
+
|
| 145 |
+
def test_set_option(self):
|
| 146 |
+
cf.register_option("a", 1, "doc")
|
| 147 |
+
cf.register_option("b.c", "hullo", "doc2")
|
| 148 |
+
cf.register_option("b.b", None, "doc2")
|
| 149 |
+
|
| 150 |
+
assert cf.get_option("a") == 1
|
| 151 |
+
assert cf.get_option("b.c") == "hullo"
|
| 152 |
+
assert cf.get_option("b.b") is None
|
| 153 |
+
|
| 154 |
+
cf.set_option("a", 2)
|
| 155 |
+
cf.set_option("b.c", "wurld")
|
| 156 |
+
cf.set_option("b.b", 1.1)
|
| 157 |
+
|
| 158 |
+
assert cf.get_option("a") == 2
|
| 159 |
+
assert cf.get_option("b.c") == "wurld"
|
| 160 |
+
assert cf.get_option("b.b") == 1.1
|
| 161 |
+
|
| 162 |
+
msg = r"No such keys\(s\): 'no.such.key'"
|
| 163 |
+
with pytest.raises(OptionError, match=msg):
|
| 164 |
+
cf.set_option("no.such.key", None)
|
| 165 |
+
|
| 166 |
+
def test_set_option_empty_args(self):
|
| 167 |
+
msg = "Must provide an even number of non-keyword arguments"
|
| 168 |
+
with pytest.raises(ValueError, match=msg):
|
| 169 |
+
cf.set_option()
|
| 170 |
+
|
| 171 |
+
def test_set_option_uneven_args(self):
|
| 172 |
+
msg = "Must provide an even number of non-keyword arguments"
|
| 173 |
+
with pytest.raises(ValueError, match=msg):
|
| 174 |
+
cf.set_option("a.b", 2, "b.c")
|
| 175 |
+
|
| 176 |
+
def test_set_option_invalid_single_argument_type(self):
|
| 177 |
+
msg = "Must provide an even number of non-keyword arguments"
|
| 178 |
+
with pytest.raises(ValueError, match=msg):
|
| 179 |
+
cf.set_option(2)
|
| 180 |
+
|
| 181 |
+
def test_set_option_multiple(self):
|
| 182 |
+
cf.register_option("a", 1, "doc")
|
| 183 |
+
cf.register_option("b.c", "hullo", "doc2")
|
| 184 |
+
cf.register_option("b.b", None, "doc2")
|
| 185 |
+
|
| 186 |
+
assert cf.get_option("a") == 1
|
| 187 |
+
assert cf.get_option("b.c") == "hullo"
|
| 188 |
+
assert cf.get_option("b.b") is None
|
| 189 |
+
|
| 190 |
+
cf.set_option("a", "2", "b.c", None, "b.b", 10.0)
|
| 191 |
+
|
| 192 |
+
assert cf.get_option("a") == "2"
|
| 193 |
+
assert cf.get_option("b.c") is None
|
| 194 |
+
assert cf.get_option("b.b") == 10.0
|
| 195 |
+
|
| 196 |
+
def test_validation(self):
|
| 197 |
+
cf.register_option("a", 1, "doc", validator=cf.is_int)
|
| 198 |
+
cf.register_option("d", 1, "doc", validator=cf.is_nonnegative_int)
|
| 199 |
+
cf.register_option("b.c", "hullo", "doc2", validator=cf.is_text)
|
| 200 |
+
|
| 201 |
+
msg = "Value must have type '<class 'int'>'"
|
| 202 |
+
with pytest.raises(ValueError, match=msg):
|
| 203 |
+
cf.register_option("a.b.c.d2", "NO", "doc", validator=cf.is_int)
|
| 204 |
+
|
| 205 |
+
cf.set_option("a", 2) # int is_int
|
| 206 |
+
cf.set_option("b.c", "wurld") # str is_str
|
| 207 |
+
cf.set_option("d", 2)
|
| 208 |
+
cf.set_option("d", None) # non-negative int can be None
|
| 209 |
+
|
| 210 |
+
# None not is_int
|
| 211 |
+
with pytest.raises(ValueError, match=msg):
|
| 212 |
+
cf.set_option("a", None)
|
| 213 |
+
with pytest.raises(ValueError, match=msg):
|
| 214 |
+
cf.set_option("a", "ab")
|
| 215 |
+
|
| 216 |
+
msg = "Value must be a nonnegative integer or None"
|
| 217 |
+
with pytest.raises(ValueError, match=msg):
|
| 218 |
+
cf.register_option("a.b.c.d3", "NO", "doc", validator=cf.is_nonnegative_int)
|
| 219 |
+
with pytest.raises(ValueError, match=msg):
|
| 220 |
+
cf.register_option("a.b.c.d3", -2, "doc", validator=cf.is_nonnegative_int)
|
| 221 |
+
|
| 222 |
+
msg = r"Value must be an instance of <class 'str'>\|<class 'bytes'>"
|
| 223 |
+
with pytest.raises(ValueError, match=msg):
|
| 224 |
+
cf.set_option("b.c", 1)
|
| 225 |
+
|
| 226 |
+
validator = cf.is_one_of_factory([None, cf.is_callable])
|
| 227 |
+
cf.register_option("b", lambda: None, "doc", validator=validator)
|
| 228 |
+
# pylint: disable-next=consider-using-f-string
|
| 229 |
+
cf.set_option("b", "%.1f".format) # Formatter is callable
|
| 230 |
+
cf.set_option("b", None) # Formatter is none (default)
|
| 231 |
+
with pytest.raises(ValueError, match="Value must be a callable"):
|
| 232 |
+
cf.set_option("b", "%.1f")
|
| 233 |
+
|
| 234 |
+
def test_reset_option(self):
|
| 235 |
+
cf.register_option("a", 1, "doc", validator=cf.is_int)
|
| 236 |
+
cf.register_option("b.c", "hullo", "doc2", validator=cf.is_str)
|
| 237 |
+
assert cf.get_option("a") == 1
|
| 238 |
+
assert cf.get_option("b.c") == "hullo"
|
| 239 |
+
|
| 240 |
+
cf.set_option("a", 2)
|
| 241 |
+
cf.set_option("b.c", "wurld")
|
| 242 |
+
assert cf.get_option("a") == 2
|
| 243 |
+
assert cf.get_option("b.c") == "wurld"
|
| 244 |
+
|
| 245 |
+
cf.reset_option("a")
|
| 246 |
+
assert cf.get_option("a") == 1
|
| 247 |
+
assert cf.get_option("b.c") == "wurld"
|
| 248 |
+
cf.reset_option("b.c")
|
| 249 |
+
assert cf.get_option("a") == 1
|
| 250 |
+
assert cf.get_option("b.c") == "hullo"
|
| 251 |
+
|
| 252 |
+
def test_reset_option_all(self):
|
| 253 |
+
cf.register_option("a", 1, "doc", validator=cf.is_int)
|
| 254 |
+
cf.register_option("b.c", "hullo", "doc2", validator=cf.is_str)
|
| 255 |
+
assert cf.get_option("a") == 1
|
| 256 |
+
assert cf.get_option("b.c") == "hullo"
|
| 257 |
+
|
| 258 |
+
cf.set_option("a", 2)
|
| 259 |
+
cf.set_option("b.c", "wurld")
|
| 260 |
+
assert cf.get_option("a") == 2
|
| 261 |
+
assert cf.get_option("b.c") == "wurld"
|
| 262 |
+
|
| 263 |
+
cf.reset_option("all")
|
| 264 |
+
assert cf.get_option("a") == 1
|
| 265 |
+
assert cf.get_option("b.c") == "hullo"
|
| 266 |
+
|
| 267 |
+
def test_deprecate_option(self):
|
| 268 |
+
# we can deprecate non-existent options
|
| 269 |
+
cf.deprecate_option("foo")
|
| 270 |
+
|
| 271 |
+
assert cf._is_deprecated("foo")
|
| 272 |
+
with tm.assert_produces_warning(FutureWarning, match="deprecated"):
|
| 273 |
+
with pytest.raises(KeyError, match="No such keys.s.: 'foo'"):
|
| 274 |
+
cf.get_option("foo")
|
| 275 |
+
|
| 276 |
+
cf.register_option("a", 1, "doc", validator=cf.is_int)
|
| 277 |
+
cf.register_option("b.c", "hullo", "doc2")
|
| 278 |
+
cf.register_option("foo", "hullo", "doc2")
|
| 279 |
+
|
| 280 |
+
cf.deprecate_option("a", removal_ver="nifty_ver")
|
| 281 |
+
with tm.assert_produces_warning(FutureWarning, match="eprecated.*nifty_ver"):
|
| 282 |
+
cf.get_option("a")
|
| 283 |
+
|
| 284 |
+
msg = "Option 'a' has already been defined as deprecated"
|
| 285 |
+
with pytest.raises(OptionError, match=msg):
|
| 286 |
+
cf.deprecate_option("a")
|
| 287 |
+
|
| 288 |
+
cf.deprecate_option("b.c", "zounds!")
|
| 289 |
+
with tm.assert_produces_warning(FutureWarning, match="zounds!"):
|
| 290 |
+
cf.get_option("b.c")
|
| 291 |
+
|
| 292 |
+
# test rerouting keys
|
| 293 |
+
cf.register_option("d.a", "foo", "doc2")
|
| 294 |
+
cf.register_option("d.dep", "bar", "doc2")
|
| 295 |
+
assert cf.get_option("d.a") == "foo"
|
| 296 |
+
assert cf.get_option("d.dep") == "bar"
|
| 297 |
+
|
| 298 |
+
cf.deprecate_option("d.dep", rkey="d.a") # reroute d.dep to d.a
|
| 299 |
+
with tm.assert_produces_warning(FutureWarning, match="eprecated"):
|
| 300 |
+
assert cf.get_option("d.dep") == "foo"
|
| 301 |
+
|
| 302 |
+
with tm.assert_produces_warning(FutureWarning, match="eprecated"):
|
| 303 |
+
cf.set_option("d.dep", "baz") # should overwrite "d.a"
|
| 304 |
+
|
| 305 |
+
with tm.assert_produces_warning(FutureWarning, match="eprecated"):
|
| 306 |
+
assert cf.get_option("d.dep") == "baz"
|
| 307 |
+
|
| 308 |
+
def test_config_prefix(self):
|
| 309 |
+
with cf.config_prefix("base"):
|
| 310 |
+
cf.register_option("a", 1, "doc1")
|
| 311 |
+
cf.register_option("b", 2, "doc2")
|
| 312 |
+
assert cf.get_option("a") == 1
|
| 313 |
+
assert cf.get_option("b") == 2
|
| 314 |
+
|
| 315 |
+
cf.set_option("a", 3)
|
| 316 |
+
cf.set_option("b", 4)
|
| 317 |
+
assert cf.get_option("a") == 3
|
| 318 |
+
assert cf.get_option("b") == 4
|
| 319 |
+
|
| 320 |
+
assert cf.get_option("base.a") == 3
|
| 321 |
+
assert cf.get_option("base.b") == 4
|
| 322 |
+
assert "doc1" in cf.describe_option("base.a", _print_desc=False)
|
| 323 |
+
assert "doc2" in cf.describe_option("base.b", _print_desc=False)
|
| 324 |
+
|
| 325 |
+
cf.reset_option("base.a")
|
| 326 |
+
cf.reset_option("base.b")
|
| 327 |
+
|
| 328 |
+
with cf.config_prefix("base"):
|
| 329 |
+
assert cf.get_option("a") == 1
|
| 330 |
+
assert cf.get_option("b") == 2
|
| 331 |
+
|
| 332 |
+
def test_callback(self):
    """The ``cb`` hook passed to register_option fires on set and reset."""
    # Each callback invocation records a (key, value-after-change) pair.
    events = [(None, None)]

    def record(key):
        events.append((key, cf.get_option(key)))

    cf.register_option("d.a", "foo", cb=record)
    cf.register_option("d.b", "foo", cb=record)

    # Drop the placeholder, then verify the hook fires for set_option.
    del events[-1]
    cf.set_option("d.a", "fooz")
    assert events[-1] == ("d.a", "fooz")

    del events[-1]
    cf.set_option("d.b", "boo")
    assert events[-1] == ("d.b", "boo")

    # reset_option must trigger the callback as well.
    del events[-1]
    cf.reset_option("d.b")
    assert events[-1][0] == "d.b"
|
| 356 |
+
|
| 357 |
+
def test_set_ContextManager(self):
    """option_context restores values on exit, nests, and works as a decorator."""

    def check(expected):
        # The live value of "a" must match what the active context dictates.
        assert cf.get_option("a") == expected

    cf.register_option("a", 0)
    check(0)

    # Nested contexts stack: the inner wins while active, the outer is restored.
    with cf.option_context("a", 15):
        check(15)
        with cf.option_context("a", 25):
            check(25)
        check(15)
    check(0)

    # Plain set_option persists outside any context.
    cf.set_option("a", 17)
    check(17)

    # option_context is also usable as a decorator (GH#34253).
    @cf.option_context("a", 123)
    def decorated():
        check(123)

    decorated()
|
| 379 |
+
|
| 380 |
+
def test_attribute_access(self):
    """``cf.options`` exposes registered options as attributes for get/set."""
    fired = []

    def on_change(key):
        fired.append(True)

    cf.register_option("a", 0)
    cf.register_option("c", 0, cb=on_change)
    options = cf.options

    # Attribute reads track the live option value, contexts included.
    assert options.a == 0
    with cf.option_context("a", 15):
        assert options.a == 15

    # Attribute writes go through set_option.
    options.a = 500
    assert cf.get_option("a") == 500

    cf.reset_option("a")
    assert options.a == cf.get_option("a", 0)

    # Assigning to an unknown attribute must be rejected.
    msg = "You can only set the value of existing options"
    for unknown in ("b", "display"):
        with pytest.raises(OptionError, match=msg):
            setattr(options, unknown, 1)

    # Attribute-style assignment must still invoke the registered callback.
    options.c = 1
    assert len(fired) == 1
|
| 409 |
+
|
| 410 |
+
def test_option_context_scope(self):
    """Instantiating option_context must not change anything until the
    ``with`` block is actually entered (GH#8514)."""
    key = "a"
    default = 60
    override = 10

    cf.register_option(key, default)

    # Merely constructing the context object is side-effect free.
    pending = cf.option_context(key, override)
    assert cf.get_option(key) == default

    # The override is only visible inside the with-block ...
    with pending:
        assert cf.get_option(key) == override

    # ... and is rolled back on exit.
    assert cf.get_option(key) == default
|
| 431 |
+
|
| 432 |
+
def test_dictwrapper_getattr(self):
    """Reading an unknown option attribute raises OptionError (GH#19789)."""
    opts = cf.options
    with pytest.raises(OptionError, match="No such option"):
        opts.bananas
    # hasattr must report False rather than leaking the OptionError.
    assert hasattr(opts, "bananas") is False
|
vllm/lib/python3.10/site-packages/pandas/tests/config/test_localization.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import codecs
|
| 2 |
+
import locale
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
from pandas._config.localization import (
|
| 8 |
+
can_set_locale,
|
| 9 |
+
get_locales,
|
| 10 |
+
set_locale,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
from pandas.compat import ISMUSL
|
| 14 |
+
|
| 15 |
+
import pandas as pd
|
| 16 |
+
|
| 17 |
+
_all_locales = get_locales()
|
| 18 |
+
_current_locale = locale.setlocale(locale.LC_ALL) # getlocale() is wrong, see GH#46595
|
| 19 |
+
|
| 20 |
+
# Don't run any of these tests if we have no locales.
|
| 21 |
+
pytestmark = pytest.mark.skipif(not _all_locales, reason="Need locales")
|
| 22 |
+
|
| 23 |
+
_skip_if_only_one_locale = pytest.mark.skipif(
|
| 24 |
+
len(_all_locales) <= 1, reason="Need multiple locales for meaningful test"
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _get_current_locale(lc_var: int = locale.LC_ALL) -> str:
|
| 29 |
+
# getlocale is not always compliant with setlocale, use setlocale. GH#46595
|
| 30 |
+
return locale.setlocale(lc_var)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.mark.parametrize("lc_var", (locale.LC_ALL, locale.LC_CTYPE, locale.LC_TIME))
def test_can_set_current_locale(lc_var):
    """Re-setting the locale we already have must succeed without drift."""
    current = _get_current_locale(lc_var)
    assert can_set_locale(current, lc_var=lc_var)
    # The probe must leave the process locale untouched.
    assert _get_current_locale(lc_var) == current
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
@pytest.mark.parametrize("lc_var", (locale.LC_ALL, locale.LC_CTYPE, locale.LC_TIME))
def test_can_set_locale_valid_set(lc_var):
    """The default ("") locale is always settable."""
    current = _get_current_locale(lc_var)
    assert can_set_locale("", lc_var=lc_var)
    # Probing must not leak a locale change.
    assert _get_current_locale(lc_var) == current
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
@pytest.mark.parametrize(
    "lc_var",
    (
        locale.LC_ALL,
        locale.LC_CTYPE,
        pytest.param(
            locale.LC_TIME,
            marks=pytest.mark.skipif(
                ISMUSL, reason="MUSL allows setting invalid LC_TIME."
            ),
        ),
    ),
)
def test_can_set_locale_invalid_set(lc_var):
    """A bogus locale name is reported as unsettable, with no side effect."""
    current = _get_current_locale(lc_var)
    assert not can_set_locale("non-existent_locale", lc_var=lc_var)
    # The failed probe must not leave the locale changed.
    assert _get_current_locale(lc_var) == current
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@pytest.mark.parametrize(
    "lang,enc",
    [
        ("it_CH", "UTF-8"),
        ("en_US", "ascii"),
        ("zh_CN", "GB2312"),
        ("it_IT", "ISO-8859-1"),
    ],
)
@pytest.mark.parametrize("lc_var", (locale.LC_ALL, locale.LC_CTYPE, locale.LC_TIME))
def test_can_set_locale_no_leak(lang, enc, lc_var):
    """can_set_locale must restore the locale even when it returns False (GH#46595)."""
    snapshot = _get_current_locale(lc_var)
    # The return value is irrelevant here; only the absence of leakage matters.
    can_set_locale((lang, enc), locale.LC_ALL)
    assert _get_current_locale(lc_var) == snapshot
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def test_can_set_locale_invalid_get(monkeypatch):
    """If getlocale() raises after a successful set, can_set_locale
    reports False instead of propagating the error (GH#22129)."""

    def broken_getlocale():
        raise ValueError()

    with monkeypatch.context() as patcher:
        patcher.setattr(locale, "getlocale", broken_getlocale)
        assert not can_set_locale("")
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def test_get_locales_at_least_one():
    """get_locales() found something at import time (GH#9744)."""
    assert _all_locales
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
@_skip_if_only_one_locale
def test_get_locales_prefix():
    """Filtering by the first locale's two-letter prefix yields matches."""
    prefix = _all_locales[0][:2]
    assert len(get_locales(prefix=prefix)) > 0
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
@_skip_if_only_one_locale
@pytest.mark.parametrize(
    "lang,enc",
    [
        ("it_CH", "UTF-8"),
        ("en_US", "ascii"),
        ("zh_CN", "GB2312"),
        ("it_IT", "ISO-8859-1"),
    ],
)
def test_set_locale(lang, enc):
    """set_locale applies a supported locale (or raises locale.Error for an
    unsupported one) and always restores the previous locale on exit."""
    saved = _get_current_locale()

    # Compare against the codec's canonical name to sidestep aliasing.
    canonical_enc = codecs.lookup(enc).name
    target = lang, canonical_enc

    if can_set_locale(target):
        with set_locale(target) as applied:
            applied_lang, _ = applied.split(".")
            # Normalize the applied encoding the same way before comparing.
            assert (applied_lang, codecs.lookup(enc).name) == target
    else:
        msg = "unsupported locale setting"
        with pytest.raises(locale.Error, match=msg):
            with set_locale(target):
                pass

    # Leaving the context must restore the original locale.
    assert _get_current_locale() == saved
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def test_encoding_detected():
    """pandas' detected display.encoding agrees with LC_ALL (or utf-8)."""
    lc_all = os.environ.get("LC_ALL")
    # Fall back to utf-8 when the environment does not pin an encoding.
    expected_enc = lc_all.split(".")[-1] if lc_all else "utf-8"

    detected = codecs.lookup(pd.options.display.encoding).name
    assert detected == codecs.lookup(expected_enc).name
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__init__.py
ADDED
|
File without changes
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (172 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_array.cpython-310.pyc
ADDED
|
Binary file (4.77 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_astype.cpython-310.pyc
ADDED
|
Binary file (6.74 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_chained_assignment_deprecation.cpython-310.pyc
ADDED
|
Binary file (4.03 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_clip.cpython-310.pyc
ADDED
|
Binary file (3.05 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_constructors.cpython-310.pyc
ADDED
|
Binary file (9.8 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_core_functionalities.cpython-310.pyc
ADDED
|
Binary file (3.11 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_functions.cpython-310.pyc
ADDED
|
Binary file (10.1 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_indexing.cpython-310.pyc
ADDED
|
Binary file (27 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_internals.cpython-310.pyc
ADDED
|
Binary file (3.72 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_interp_fillna.cpython-310.pyc
ADDED
|
Binary file (11.7 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_methods.cpython-310.pyc
ADDED
|
Binary file (52 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_replace.cpython-310.pyc
ADDED
|
Binary file (12.6 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_setitem.cpython-310.pyc
ADDED
|
Binary file (3.74 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/test_util.cpython-310.pyc
ADDED
|
Binary file (696 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/__pycache__/util.cpython-310.pyc
ADDED
|
Binary file (976 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__init__.py
ADDED
|
File without changes
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (178 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_datetimeindex.cpython-310.pyc
ADDED
|
Binary file (2.29 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_index.cpython-310.pyc
ADDED
|
Binary file (5.88 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_periodindex.cpython-310.pyc
ADDED
|
Binary file (1.05 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/__pycache__/test_timedeltaindex.cpython-310.pyc
ADDED
|
Binary file (1.04 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pandas/tests/copy_view/index/test_datetimeindex.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from pandas import (
|
| 4 |
+
DatetimeIndex,
|
| 5 |
+
Series,
|
| 6 |
+
Timestamp,
|
| 7 |
+
date_range,
|
| 8 |
+
)
|
| 9 |
+
import pandas._testing as tm
|
| 10 |
+
|
| 11 |
+
pytestmark = pytest.mark.filterwarnings(
|
| 12 |
+
"ignore:Setting a value on a view:FutureWarning"
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@pytest.mark.parametrize(
    "cons",
    [
        lambda x: DatetimeIndex(x),
        lambda x: DatetimeIndex(DatetimeIndex(x)),
    ],
)
def test_datetimeindex(using_copy_on_write, cons):
    """Under CoW, an index built from a Series must not see later Series mutations."""
    dti = date_range("2019-12-31", periods=3, freq="D")
    ser = Series(dti)
    idx = cons(ser)
    snapshot = idx.copy(deep=True)
    # Mutate the originating Series after the index was created.
    ser.iloc[0] = Timestamp("2020-12-31")
    if using_copy_on_write:
        tm.assert_index_equal(idx, snapshot)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def test_datetimeindex_tz_convert(using_copy_on_write):
|
| 34 |
+
dt = date_range("2019-12-31", periods=3, freq="D", tz="Europe/Berlin")
|
| 35 |
+
ser = Series(dt)
|
| 36 |
+
idx = DatetimeIndex(ser).tz_convert("US/Eastern")
|
| 37 |
+
expected = idx.copy(deep=True)
|
| 38 |
+
ser.iloc[0] = Timestamp("2020-12-31", tz="Europe/Berlin")
|
| 39 |
+
if using_copy_on_write:
|
| 40 |
+
tm.assert_index_equal(idx, expected)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def test_datetimeindex_tz_localize(using_copy_on_write):
|
| 44 |
+
dt = date_range("2019-12-31", periods=3, freq="D")
|
| 45 |
+
ser = Series(dt)
|
| 46 |
+
idx = DatetimeIndex(ser).tz_localize("Europe/Berlin")
|
| 47 |
+
expected = idx.copy(deep=True)
|
| 48 |
+
ser.iloc[0] = Timestamp("2020-12-31")
|
| 49 |
+
if using_copy_on_write:
|
| 50 |
+
tm.assert_index_equal(idx, expected)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def test_datetimeindex_isocalendar(using_copy_on_write):
|
| 54 |
+
dt = date_range("2019-12-31", periods=3, freq="D")
|
| 55 |
+
ser = Series(dt)
|
| 56 |
+
df = DatetimeIndex(ser).isocalendar()
|
| 57 |
+
expected = df.index.copy(deep=True)
|
| 58 |
+
ser.iloc[0] = Timestamp("2020-12-31")
|
| 59 |
+
if using_copy_on_write:
|
| 60 |
+
tm.assert_index_equal(df.index, expected)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def test_index_values(using_copy_on_write):
|
| 64 |
+
idx = date_range("2019-12-31", periods=3, freq="D")
|
| 65 |
+
result = idx.values
|
| 66 |
+
if using_copy_on_write:
|
| 67 |
+
assert result.flags.writeable is False
|
| 68 |
+
else:
|
| 69 |
+
assert result.flags.writeable is True
|