ZTWHHH commited on
Commit
90878cc
·
verified ·
1 Parent(s): 07b7a81

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_aggregation.cpython-310.pyc +0 -0
  3. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_algos.cpython-310.pyc +0 -0
  4. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_downstream.cpython-310.pyc +0 -0
  5. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_errors.cpython-310.pyc +0 -0
  6. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_expressions.cpython-310.pyc +0 -0
  7. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_flags.cpython-310.pyc +0 -0
  8. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_multilevel.cpython-310.pyc +0 -0
  9. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_nanops.cpython-310.pyc +0 -0
  10. videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_sorting.cpython-310.pyc +0 -0
  11. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__init__.py +0 -0
  12. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc +0 -0
  13. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/common.cpython-310.pyc +0 -0
  14. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/conftest.cpython-310.pyc +0 -0
  15. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply.cpython-310.pyc +0 -0
  16. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc +0 -0
  17. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc +0 -0
  18. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc +0 -0
  19. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc +0 -0
  20. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc +0 -0
  21. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc +0 -0
  22. videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_str.cpython-310.pyc +0 -0
  23. videochat2/lib/python3.10/site-packages/pandas/tests/apply/common.py +7 -0
  24. videochat2/lib/python3.10/site-packages/pandas/tests/apply/conftest.py +18 -0
  25. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply.py +1644 -0
  26. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply_relabeling.py +101 -0
  27. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_frame_transform.py +242 -0
  28. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_invalid_arg.py +365 -0
  29. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_series_apply.py +956 -0
  30. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_series_apply_relabeling.py +33 -0
  31. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_series_transform.py +49 -0
  32. videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_str.py +297 -0
  33. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__init__.py +0 -0
  34. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/__init__.cpython-310.pyc +0 -0
  35. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/common.cpython-310.pyc +0 -0
  36. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/conftest.cpython-310.pyc +0 -0
  37. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_array_ops.cpython-310.pyc +0 -0
  38. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_categorical.cpython-310.pyc +0 -0
  39. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_datetime64.cpython-310.pyc +0 -0
  40. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_interval.cpython-310.pyc +0 -0
  41. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_numeric.cpython-310.pyc +0 -0
  42. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_object.cpython-310.pyc +0 -0
  43. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_period.cpython-310.pyc +0 -0
  44. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_timedelta64.cpython-310.pyc +0 -0
  45. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/common.py +155 -0
  46. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/conftest.py +228 -0
  47. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_array_ops.py +39 -0
  48. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_categorical.py +25 -0
  49. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_datetime64.py +2475 -0
  50. videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_interval.py +306 -0
.gitattributes CHANGED
@@ -1285,3 +1285,4 @@ videochat2/lib/python3.10/site-packages/pandas/io/sas/_sas.cpython-310-x86_64-li
1285
  videochat2/lib/python3.10/site-packages/pandas/io/__pycache__/pytables.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1286
  videochat2/lib/python3.10/site-packages/pandas/io/__pycache__/stata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1287
  videochat2/lib/python3.10/site-packages/pandas/tests/frame/__pycache__/test_constructors.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 
 
1285
  videochat2/lib/python3.10/site-packages/pandas/io/__pycache__/pytables.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1286
  videochat2/lib/python3.10/site-packages/pandas/io/__pycache__/stata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1287
  videochat2/lib/python3.10/site-packages/pandas/tests/frame/__pycache__/test_constructors.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1288
+ videochat2/lib/python3.10/site-packages/pandas/tests/io/formats/__pycache__/test_format.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_aggregation.cpython-310.pyc ADDED
Binary file (3.07 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_algos.cpython-310.pyc ADDED
Binary file (60.5 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_downstream.cpython-310.pyc ADDED
Binary file (6.91 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_errors.cpython-310.pyc ADDED
Binary file (3.02 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_expressions.cpython-310.pyc ADDED
Binary file (11.6 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_flags.cpython-310.pyc ADDED
Binary file (1.88 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_multilevel.cpython-310.pyc ADDED
Binary file (8.98 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_nanops.cpython-310.pyc ADDED
Binary file (33.5 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/__pycache__/test_sorting.cpython-310.pyc ADDED
Binary file (15 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__init__.py ADDED
File without changes
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (174 Bytes). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/common.cpython-310.pyc ADDED
Binary file (547 Bytes). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (667 Bytes). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply.cpython-310.pyc ADDED
Binary file (53.7 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc ADDED
Binary file (2.83 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc ADDED
Binary file (7.15 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc ADDED
Binary file (12.8 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc ADDED
Binary file (30.5 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc ADDED
Binary file (1.15 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc ADDED
Binary file (1.8 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_str.cpython-310.pyc ADDED
Binary file (6.2 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/apply/common.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ from pandas.core.groupby.base import transformation_kernels
2
+
3
+ # There is no Series.cumcount or DataFrame.cumcount
4
+ series_transform_kernels = [
5
+ x for x in sorted(transformation_kernels) if x != "cumcount"
6
+ ]
7
+ frame_transform_kernels = [x for x in sorted(transformation_kernels) if x != "cumcount"]
videochat2/lib/python3.10/site-packages/pandas/tests/apply/conftest.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import DataFrame
5
+
6
+
7
+ @pytest.fixture
8
+ def int_frame_const_col():
9
+ """
10
+ Fixture for DataFrame of ints which are constant per column
11
+
12
+ Columns are ['A', 'B', 'C'], with values (per column): [1, 2, 3]
13
+ """
14
+ df = DataFrame(
15
+ np.tile(np.arange(3, dtype="int64"), 6).reshape(6, -1) + 1,
16
+ columns=["A", "B", "C"],
17
+ )
18
+ return df
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply.py ADDED
@@ -0,0 +1,1644 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ import warnings
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas.core.dtypes.dtypes import CategoricalDtype
8
+
9
+ import pandas as pd
10
+ from pandas import (
11
+ DataFrame,
12
+ MultiIndex,
13
+ Series,
14
+ Timestamp,
15
+ date_range,
16
+ )
17
+ import pandas._testing as tm
18
+ from pandas.tests.frame.common import zip_frames
19
+
20
+
21
+ def test_apply(float_frame):
22
+ with np.errstate(all="ignore"):
23
+ # ufunc
24
+ result = np.sqrt(float_frame["A"])
25
+ expected = float_frame.apply(np.sqrt)["A"]
26
+ tm.assert_series_equal(result, expected)
27
+
28
+ # aggregator
29
+ result = float_frame.apply(np.mean)["A"]
30
+ expected = np.mean(float_frame["A"])
31
+ assert result == expected
32
+
33
+ d = float_frame.index[0]
34
+ result = float_frame.apply(np.mean, axis=1)
35
+ expected = np.mean(float_frame.xs(d))
36
+ assert result[d] == expected
37
+ assert result.index is float_frame.index
38
+
39
+
40
+ def test_apply_categorical_func():
41
+ # GH 9573
42
+ df = DataFrame({"c0": ["A", "A", "B", "B"], "c1": ["C", "C", "D", "D"]})
43
+ result = df.apply(lambda ts: ts.astype("category"))
44
+
45
+ assert result.shape == (4, 2)
46
+ assert isinstance(result["c0"].dtype, CategoricalDtype)
47
+ assert isinstance(result["c1"].dtype, CategoricalDtype)
48
+
49
+
50
+ def test_apply_axis1_with_ea():
51
+ # GH#36785
52
+ expected = DataFrame({"A": [Timestamp("2013-01-01", tz="UTC")]})
53
+ result = expected.apply(lambda x: x, axis=1)
54
+ tm.assert_frame_equal(result, expected)
55
+
56
+
57
+ @pytest.mark.parametrize(
58
+ "data, dtype",
59
+ [(1, None), (1, CategoricalDtype([1])), (Timestamp("2013-01-01", tz="UTC"), None)],
60
+ )
61
+ def test_agg_axis1_duplicate_index(data, dtype):
62
+ # GH 42380
63
+ expected = DataFrame([[data], [data]], index=["a", "a"], dtype=dtype)
64
+ result = expected.agg(lambda x: x, axis=1)
65
+ tm.assert_frame_equal(result, expected)
66
+
67
+
68
+ def test_apply_mixed_datetimelike():
69
+ # mixed datetimelike
70
+ # GH 7778
71
+ expected = DataFrame(
72
+ {
73
+ "A": date_range("20130101", periods=3),
74
+ "B": pd.to_timedelta(np.arange(3), unit="s"),
75
+ }
76
+ )
77
+ result = expected.apply(lambda x: x, axis=1)
78
+ tm.assert_frame_equal(result, expected)
79
+
80
+
81
+ @pytest.mark.parametrize("func", [np.sqrt, np.mean])
82
+ def test_apply_empty(func):
83
+ # empty
84
+ empty_frame = DataFrame()
85
+
86
+ result = empty_frame.apply(func)
87
+ assert result.empty
88
+
89
+
90
+ def test_apply_float_frame(float_frame):
91
+ no_rows = float_frame[:0]
92
+ result = no_rows.apply(lambda x: x.mean())
93
+ expected = Series(np.nan, index=float_frame.columns)
94
+ tm.assert_series_equal(result, expected)
95
+
96
+ no_cols = float_frame.loc[:, []]
97
+ result = no_cols.apply(lambda x: x.mean(), axis=1)
98
+ expected = Series(np.nan, index=float_frame.index)
99
+ tm.assert_series_equal(result, expected)
100
+
101
+
102
+ def test_apply_empty_except_index():
103
+ # GH 2476
104
+ expected = DataFrame(index=["a"])
105
+ result = expected.apply(lambda x: x["a"], axis=1)
106
+ tm.assert_frame_equal(result, expected)
107
+
108
+
109
+ def test_apply_with_reduce_empty():
110
+ # reduce with an empty DataFrame
111
+ empty_frame = DataFrame()
112
+
113
+ x = []
114
+ result = empty_frame.apply(x.append, axis=1, result_type="expand")
115
+ tm.assert_frame_equal(result, empty_frame)
116
+ result = empty_frame.apply(x.append, axis=1, result_type="reduce")
117
+ expected = Series([], dtype=np.float64)
118
+ tm.assert_series_equal(result, expected)
119
+
120
+ empty_with_cols = DataFrame(columns=["a", "b", "c"])
121
+ result = empty_with_cols.apply(x.append, axis=1, result_type="expand")
122
+ tm.assert_frame_equal(result, empty_with_cols)
123
+ result = empty_with_cols.apply(x.append, axis=1, result_type="reduce")
124
+ expected = Series([], dtype=np.float64)
125
+ tm.assert_series_equal(result, expected)
126
+
127
+ # Ensure that x.append hasn't been called
128
+ assert x == []
129
+
130
+
131
+ @pytest.mark.parametrize("func", ["sum", "prod", "any", "all"])
132
+ def test_apply_funcs_over_empty(func):
133
+ # GH 28213
134
+ df = DataFrame(columns=["a", "b", "c"])
135
+
136
+ result = df.apply(getattr(np, func))
137
+ expected = getattr(df, func)()
138
+ if func in ("sum", "prod"):
139
+ expected = expected.astype(float)
140
+ tm.assert_series_equal(result, expected)
141
+
142
+
143
+ def test_nunique_empty():
144
+ # GH 28213
145
+ df = DataFrame(columns=["a", "b", "c"])
146
+
147
+ result = df.nunique()
148
+ expected = Series(0, index=df.columns)
149
+ tm.assert_series_equal(result, expected)
150
+
151
+ result = df.T.nunique()
152
+ expected = Series([], dtype=np.float64)
153
+ tm.assert_series_equal(result, expected)
154
+
155
+
156
+ def test_apply_standard_nonunique():
157
+ df = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]], index=["a", "a", "c"])
158
+
159
+ result = df.apply(lambda s: s[0], axis=1)
160
+ expected = Series([1, 4, 7], ["a", "a", "c"])
161
+ tm.assert_series_equal(result, expected)
162
+
163
+ result = df.T.apply(lambda s: s[0], axis=0)
164
+ tm.assert_series_equal(result, expected)
165
+
166
+
167
+ def test_apply_broadcast_scalars(float_frame):
168
+ # scalars
169
+ result = float_frame.apply(np.mean, result_type="broadcast")
170
+ expected = DataFrame([float_frame.mean()], index=float_frame.index)
171
+ tm.assert_frame_equal(result, expected)
172
+
173
+
174
+ def test_apply_broadcast_scalars_axis1(float_frame):
175
+ result = float_frame.apply(np.mean, axis=1, result_type="broadcast")
176
+ m = float_frame.mean(axis=1)
177
+ expected = DataFrame({c: m for c in float_frame.columns})
178
+ tm.assert_frame_equal(result, expected)
179
+
180
+
181
+ def test_apply_broadcast_lists_columns(float_frame):
182
+ # lists
183
+ result = float_frame.apply(
184
+ lambda x: list(range(len(float_frame.columns))),
185
+ axis=1,
186
+ result_type="broadcast",
187
+ )
188
+ m = list(range(len(float_frame.columns)))
189
+ expected = DataFrame(
190
+ [m] * len(float_frame.index),
191
+ dtype="float64",
192
+ index=float_frame.index,
193
+ columns=float_frame.columns,
194
+ )
195
+ tm.assert_frame_equal(result, expected)
196
+
197
+
198
+ def test_apply_broadcast_lists_index(float_frame):
199
+ result = float_frame.apply(
200
+ lambda x: list(range(len(float_frame.index))), result_type="broadcast"
201
+ )
202
+ m = list(range(len(float_frame.index)))
203
+ expected = DataFrame(
204
+ {c: m for c in float_frame.columns},
205
+ dtype="float64",
206
+ index=float_frame.index,
207
+ )
208
+ tm.assert_frame_equal(result, expected)
209
+
210
+
211
+ def test_apply_broadcast_list_lambda_func(int_frame_const_col):
212
+ # preserve columns
213
+ df = int_frame_const_col
214
+ result = df.apply(lambda x: [1, 2, 3], axis=1, result_type="broadcast")
215
+ tm.assert_frame_equal(result, df)
216
+
217
+
218
+ def test_apply_broadcast_series_lambda_func(int_frame_const_col):
219
+ df = int_frame_const_col
220
+ result = df.apply(
221
+ lambda x: Series([1, 2, 3], index=list("abc")),
222
+ axis=1,
223
+ result_type="broadcast",
224
+ )
225
+ expected = df.copy()
226
+ tm.assert_frame_equal(result, expected)
227
+
228
+
229
+ @pytest.mark.parametrize("axis", [0, 1])
230
+ def test_apply_raw_float_frame(float_frame, axis):
231
+ def _assert_raw(x):
232
+ assert isinstance(x, np.ndarray)
233
+ assert x.ndim == 1
234
+
235
+ float_frame.apply(_assert_raw, axis=axis, raw=True)
236
+
237
+
238
+ @pytest.mark.parametrize("axis", [0, 1])
239
+ def test_apply_raw_float_frame_lambda(float_frame, axis):
240
+ result = float_frame.apply(np.mean, axis=axis, raw=True)
241
+ expected = float_frame.apply(lambda x: x.values.mean(), axis=axis)
242
+ tm.assert_series_equal(result, expected)
243
+
244
+
245
+ def test_apply_raw_float_frame_no_reduction(float_frame):
246
+ # no reduction
247
+ result = float_frame.apply(lambda x: x * 2, raw=True)
248
+ expected = float_frame * 2
249
+ tm.assert_frame_equal(result, expected)
250
+
251
+
252
+ @pytest.mark.parametrize("axis", [0, 1])
253
+ def test_apply_raw_mixed_type_frame(mixed_type_frame, axis):
254
+ def _assert_raw(x):
255
+ assert isinstance(x, np.ndarray)
256
+ assert x.ndim == 1
257
+
258
+ # Mixed dtype (GH-32423)
259
+ mixed_type_frame.apply(_assert_raw, axis=axis, raw=True)
260
+
261
+
262
+ def test_apply_axis1(float_frame):
263
+ d = float_frame.index[0]
264
+ result = float_frame.apply(np.mean, axis=1)[d]
265
+ expected = np.mean(float_frame.xs(d))
266
+ assert result == expected
267
+
268
+
269
+ def test_apply_mixed_dtype_corner():
270
+ df = DataFrame({"A": ["foo"], "B": [1.0]})
271
+ result = df[:0].apply(np.mean, axis=1)
272
+ # the result here is actually kind of ambiguous, should it be a Series
273
+ # or a DataFrame?
274
+ expected = Series(np.nan, index=pd.Index([], dtype="int64"))
275
+ tm.assert_series_equal(result, expected)
276
+
277
+
278
+ def test_apply_mixed_dtype_corner_indexing():
279
+ df = DataFrame({"A": ["foo"], "B": [1.0]})
280
+ result = df.apply(lambda x: x["A"], axis=1)
281
+ expected = Series(["foo"], index=[0])
282
+ tm.assert_series_equal(result, expected)
283
+
284
+ result = df.apply(lambda x: x["B"], axis=1)
285
+ expected = Series([1.0], index=[0])
286
+ tm.assert_series_equal(result, expected)
287
+
288
+
289
+ @pytest.mark.parametrize("ax", ["index", "columns"])
290
+ @pytest.mark.parametrize(
291
+ "func", [lambda x: x, lambda x: x.mean()], ids=["identity", "mean"]
292
+ )
293
+ @pytest.mark.parametrize("raw", [True, False])
294
+ @pytest.mark.parametrize("axis", [0, 1])
295
+ def test_apply_empty_infer_type(ax, func, raw, axis):
296
+ df = DataFrame(**{ax: ["a", "b", "c"]})
297
+
298
+ with np.errstate(all="ignore"):
299
+ with warnings.catch_warnings(record=True):
300
+ warnings.simplefilter("ignore", RuntimeWarning)
301
+ test_res = func(np.array([], dtype="f8"))
302
+ is_reduction = not isinstance(test_res, np.ndarray)
303
+
304
+ result = df.apply(func, axis=axis, raw=raw)
305
+ if is_reduction:
306
+ agg_axis = df._get_agg_axis(axis)
307
+ assert isinstance(result, Series)
308
+ assert result.index is agg_axis
309
+ else:
310
+ assert isinstance(result, DataFrame)
311
+
312
+
313
+ def test_apply_empty_infer_type_broadcast():
314
+ no_cols = DataFrame(index=["a", "b", "c"])
315
+ result = no_cols.apply(lambda x: x.mean(), result_type="broadcast")
316
+ assert isinstance(result, DataFrame)
317
+
318
+
319
+ def test_apply_with_args_kwds_add_some(float_frame):
320
+ def add_some(x, howmuch=0):
321
+ return x + howmuch
322
+
323
+ result = float_frame.apply(add_some, howmuch=2)
324
+ expected = float_frame.apply(lambda x: x + 2)
325
+ tm.assert_frame_equal(result, expected)
326
+
327
+
328
+ def test_apply_with_args_kwds_agg_and_add(float_frame):
329
+ def agg_and_add(x, howmuch=0):
330
+ return x.mean() + howmuch
331
+
332
+ result = float_frame.apply(agg_and_add, howmuch=2)
333
+ expected = float_frame.apply(lambda x: x.mean() + 2)
334
+ tm.assert_series_equal(result, expected)
335
+
336
+
337
+ def test_apply_with_args_kwds_subtract_and_divide(float_frame):
338
+ def subtract_and_divide(x, sub, divide=1):
339
+ return (x - sub) / divide
340
+
341
+ result = float_frame.apply(subtract_and_divide, args=(2,), divide=2)
342
+ expected = float_frame.apply(lambda x: (x - 2.0) / 2.0)
343
+ tm.assert_frame_equal(result, expected)
344
+
345
+
346
+ def test_apply_yield_list(float_frame):
347
+ result = float_frame.apply(list)
348
+ tm.assert_frame_equal(result, float_frame)
349
+
350
+
351
+ def test_apply_reduce_Series(float_frame):
352
+ float_frame.iloc[::2, float_frame.columns.get_loc("A")] = np.nan
353
+ expected = float_frame.mean(1)
354
+ result = float_frame.apply(np.mean, axis=1)
355
+ tm.assert_series_equal(result, expected)
356
+
357
+
358
+ def test_apply_reduce_to_dict():
359
+ # GH 25196 37544
360
+ data = DataFrame([[1, 2], [3, 4]], columns=["c0", "c1"], index=["i0", "i1"])
361
+
362
+ result = data.apply(dict, axis=0)
363
+ expected = Series([{"i0": 1, "i1": 3}, {"i0": 2, "i1": 4}], index=data.columns)
364
+ tm.assert_series_equal(result, expected)
365
+
366
+ result = data.apply(dict, axis=1)
367
+ expected = Series([{"c0": 1, "c1": 2}, {"c0": 3, "c1": 4}], index=data.index)
368
+ tm.assert_series_equal(result, expected)
369
+
370
+
371
+ def test_apply_differently_indexed():
372
+ df = DataFrame(np.random.randn(20, 10))
373
+
374
+ result = df.apply(Series.describe, axis=0)
375
+ expected = DataFrame({i: v.describe() for i, v in df.items()}, columns=df.columns)
376
+ tm.assert_frame_equal(result, expected)
377
+
378
+ result = df.apply(Series.describe, axis=1)
379
+ expected = DataFrame({i: v.describe() for i, v in df.T.items()}, columns=df.index).T
380
+ tm.assert_frame_equal(result, expected)
381
+
382
+
383
+ def test_apply_bug():
384
+ # GH 6125
385
+ positions = DataFrame(
386
+ [
387
+ [1, "ABC0", 50],
388
+ [1, "YUM0", 20],
389
+ [1, "DEF0", 20],
390
+ [2, "ABC1", 50],
391
+ [2, "YUM1", 20],
392
+ [2, "DEF1", 20],
393
+ ],
394
+ columns=["a", "market", "position"],
395
+ )
396
+
397
+ def f(r):
398
+ return r["market"]
399
+
400
+ expected = positions.apply(f, axis=1)
401
+
402
+ positions = DataFrame(
403
+ [
404
+ [datetime(2013, 1, 1), "ABC0", 50],
405
+ [datetime(2013, 1, 2), "YUM0", 20],
406
+ [datetime(2013, 1, 3), "DEF0", 20],
407
+ [datetime(2013, 1, 4), "ABC1", 50],
408
+ [datetime(2013, 1, 5), "YUM1", 20],
409
+ [datetime(2013, 1, 6), "DEF1", 20],
410
+ ],
411
+ columns=["a", "market", "position"],
412
+ )
413
+ result = positions.apply(f, axis=1)
414
+ tm.assert_series_equal(result, expected)
415
+
416
+
417
+ def test_apply_convert_objects():
418
+ expected = DataFrame(
419
+ {
420
+ "A": [
421
+ "foo",
422
+ "foo",
423
+ "foo",
424
+ "foo",
425
+ "bar",
426
+ "bar",
427
+ "bar",
428
+ "bar",
429
+ "foo",
430
+ "foo",
431
+ "foo",
432
+ ],
433
+ "B": [
434
+ "one",
435
+ "one",
436
+ "one",
437
+ "two",
438
+ "one",
439
+ "one",
440
+ "one",
441
+ "two",
442
+ "two",
443
+ "two",
444
+ "one",
445
+ ],
446
+ "C": [
447
+ "dull",
448
+ "dull",
449
+ "shiny",
450
+ "dull",
451
+ "dull",
452
+ "shiny",
453
+ "shiny",
454
+ "dull",
455
+ "shiny",
456
+ "shiny",
457
+ "shiny",
458
+ ],
459
+ "D": np.random.randn(11),
460
+ "E": np.random.randn(11),
461
+ "F": np.random.randn(11),
462
+ }
463
+ )
464
+
465
+ result = expected.apply(lambda x: x, axis=1)
466
+ tm.assert_frame_equal(result, expected)
467
+
468
+
469
+ def test_apply_attach_name(float_frame):
470
+ result = float_frame.apply(lambda x: x.name)
471
+ expected = Series(float_frame.columns, index=float_frame.columns)
472
+ tm.assert_series_equal(result, expected)
473
+
474
+
475
+ def test_apply_attach_name_axis1(float_frame):
476
+ result = float_frame.apply(lambda x: x.name, axis=1)
477
+ expected = Series(float_frame.index, index=float_frame.index)
478
+ tm.assert_series_equal(result, expected)
479
+
480
+
481
+ def test_apply_attach_name_non_reduction(float_frame):
482
+ # non-reductions
483
+ result = float_frame.apply(lambda x: np.repeat(x.name, len(x)))
484
+ expected = DataFrame(
485
+ np.tile(float_frame.columns, (len(float_frame.index), 1)),
486
+ index=float_frame.index,
487
+ columns=float_frame.columns,
488
+ )
489
+ tm.assert_frame_equal(result, expected)
490
+
491
+
492
+ def test_apply_attach_name_non_reduction_axis1(float_frame):
493
+ result = float_frame.apply(lambda x: np.repeat(x.name, len(x)), axis=1)
494
+ expected = Series(
495
+ np.repeat(t[0], len(float_frame.columns)) for t in float_frame.itertuples()
496
+ )
497
+ expected.index = float_frame.index
498
+ tm.assert_series_equal(result, expected)
499
+
500
+
501
+ def test_apply_multi_index():
502
+ index = MultiIndex.from_arrays([["a", "a", "b"], ["c", "d", "d"]])
503
+ s = DataFrame([[1, 2], [3, 4], [5, 6]], index=index, columns=["col1", "col2"])
504
+ result = s.apply(lambda x: Series({"min": min(x), "max": max(x)}), 1)
505
+ expected = DataFrame([[1, 2], [3, 4], [5, 6]], index=index, columns=["min", "max"])
506
+ tm.assert_frame_equal(result, expected, check_like=True)
507
+
508
+
509
+ @pytest.mark.parametrize(
510
+ "df, dicts",
511
+ [
512
+ [
513
+ DataFrame([["foo", "bar"], ["spam", "eggs"]]),
514
+ Series([{0: "foo", 1: "spam"}, {0: "bar", 1: "eggs"}]),
515
+ ],
516
+ [DataFrame([[0, 1], [2, 3]]), Series([{0: 0, 1: 2}, {0: 1, 1: 3}])],
517
+ ],
518
+ )
519
+ def test_apply_dict(df, dicts):
520
+ # GH 8735
521
+ fn = lambda x: x.to_dict()
522
+ reduce_true = df.apply(fn, result_type="reduce")
523
+ reduce_false = df.apply(fn, result_type="expand")
524
+ reduce_none = df.apply(fn)
525
+
526
+ tm.assert_series_equal(reduce_true, dicts)
527
+ tm.assert_frame_equal(reduce_false, df)
528
+ tm.assert_series_equal(reduce_none, dicts)
529
+
530
+
531
+ def test_applymap(float_frame):
532
+ applied = float_frame.applymap(lambda x: x * 2)
533
+ tm.assert_frame_equal(applied, float_frame * 2)
534
+ float_frame.applymap(type)
535
+
536
+ # GH 465: function returning tuples
537
+ result = float_frame.applymap(lambda x: (x, x))["A"][0]
538
+ assert isinstance(result, tuple)
539
+
540
+
541
+ @pytest.mark.parametrize("val", [1, 1.0])
542
+ def test_applymap_float_object_conversion(val):
543
+ # GH 2909: object conversion to float in constructor?
544
+ df = DataFrame(data=[val, "a"])
545
+ result = df.applymap(lambda x: x).dtypes[0]
546
+ assert result == object
547
+
548
+
549
+ def test_applymap_str():
550
+ # GH 2786
551
+ df = DataFrame(np.random.random((3, 4)))
552
+ df2 = df.copy()
553
+ cols = ["a", "a", "a", "a"]
554
+ df.columns = cols
555
+
556
+ expected = df2.applymap(str)
557
+ expected.columns = cols
558
+ result = df.applymap(str)
559
+ tm.assert_frame_equal(result, expected)
560
+
561
+
562
+ @pytest.mark.parametrize(
563
+ "col, val",
564
+ [["datetime", Timestamp("20130101")], ["timedelta", pd.Timedelta("1 min")]],
565
+ )
566
+ def test_applymap_datetimelike(col, val):
567
+ # datetime/timedelta
568
+ df = DataFrame(np.random.random((3, 4)))
569
+ df[col] = val
570
+ result = df.applymap(str)
571
+ assert result.loc[0, col] == str(df.loc[0, col])
572
+
573
+
574
+ @pytest.mark.parametrize(
575
+ "expected",
576
+ [
577
+ DataFrame(),
578
+ DataFrame(columns=list("ABC")),
579
+ DataFrame(index=list("ABC")),
580
+ DataFrame({"A": [], "B": [], "C": []}),
581
+ ],
582
+ )
583
+ @pytest.mark.parametrize("func", [round, lambda x: x])
584
+ def test_applymap_empty(expected, func):
585
+ # GH 8222
586
+ result = expected.applymap(func)
587
+ tm.assert_frame_equal(result, expected)
588
+
589
+
590
+ def test_applymap_kwargs():
591
+ # GH 40652
592
+ result = DataFrame([[1, 2], [3, 4]]).applymap(lambda x, y: x + y, y=2)
593
+ expected = DataFrame([[3, 4], [5, 6]])
594
+ tm.assert_frame_equal(result, expected)
595
+
596
+
597
+ def test_applymap_na_ignore(float_frame):
598
+ # GH 23803
599
+ strlen_frame = float_frame.applymap(lambda x: len(str(x)))
600
+ float_frame_with_na = float_frame.copy()
601
+ mask = np.random.randint(0, 2, size=float_frame.shape, dtype=bool)
602
+ float_frame_with_na[mask] = pd.NA
603
+ strlen_frame_na_ignore = float_frame_with_na.applymap(
604
+ lambda x: len(str(x)), na_action="ignore"
605
+ )
606
+ strlen_frame_with_na = strlen_frame.copy()
607
+ strlen_frame_with_na[mask] = pd.NA
608
+ tm.assert_frame_equal(strlen_frame_na_ignore, strlen_frame_with_na)
609
+
610
+
611
+ def test_applymap_box_timestamps():
612
+ # GH 2689, GH 2627
613
+ ser = Series(date_range("1/1/2000", periods=10))
614
+
615
+ def func(x):
616
+ return (x.hour, x.day, x.month)
617
+
618
+ # it works!
619
+ DataFrame(ser).applymap(func)
620
+
621
+
622
+ def test_applymap_box():
623
+ # ufunc will not be boxed. Same test cases as the test_map_box
624
+ df = DataFrame(
625
+ {
626
+ "a": [Timestamp("2011-01-01"), Timestamp("2011-01-02")],
627
+ "b": [
628
+ Timestamp("2011-01-01", tz="US/Eastern"),
629
+ Timestamp("2011-01-02", tz="US/Eastern"),
630
+ ],
631
+ "c": [pd.Timedelta("1 days"), pd.Timedelta("2 days")],
632
+ "d": [
633
+ pd.Period("2011-01-01", freq="M"),
634
+ pd.Period("2011-01-02", freq="M"),
635
+ ],
636
+ }
637
+ )
638
+
639
+ result = df.applymap(lambda x: type(x).__name__)
640
+ expected = DataFrame(
641
+ {
642
+ "a": ["Timestamp", "Timestamp"],
643
+ "b": ["Timestamp", "Timestamp"],
644
+ "c": ["Timedelta", "Timedelta"],
645
+ "d": ["Period", "Period"],
646
+ }
647
+ )
648
+ tm.assert_frame_equal(result, expected)
649
+
650
+
651
+ def test_frame_apply_dont_convert_datetime64():
652
+ from pandas.tseries.offsets import BDay
653
+
654
+ df = DataFrame({"x1": [datetime(1996, 1, 1)]})
655
+
656
+ df = df.applymap(lambda x: x + BDay())
657
+ df = df.applymap(lambda x: x + BDay())
658
+
659
+ result = df.x1.dtype
660
+ assert result == "M8[ns]"
661
+
662
+
663
+ def test_apply_non_numpy_dtype():
664
+ # GH 12244
665
+ df = DataFrame({"dt": date_range("2015-01-01", periods=3, tz="Europe/Brussels")})
666
+ result = df.apply(lambda x: x)
667
+ tm.assert_frame_equal(result, df)
668
+
669
+ result = df.apply(lambda x: x + pd.Timedelta("1day"))
670
+ expected = DataFrame(
671
+ {"dt": date_range("2015-01-02", periods=3, tz="Europe/Brussels")}
672
+ )
673
+ tm.assert_frame_equal(result, expected)
674
+
675
+
676
+ def test_apply_non_numpy_dtype_category():
677
+ df = DataFrame({"dt": ["a", "b", "c", "a"]}, dtype="category")
678
+ result = df.apply(lambda x: x)
679
+ tm.assert_frame_equal(result, df)
680
+
681
+
682
+ def test_apply_dup_names_multi_agg():
683
+ # GH 21063
684
+ df = DataFrame([[0, 1], [2, 3]], columns=["a", "a"])
685
+ expected = DataFrame([[0, 1]], columns=["a", "a"], index=["min"])
686
+ result = df.agg(["min"])
687
+
688
+ tm.assert_frame_equal(result, expected)
689
+
690
+
691
+ @pytest.mark.parametrize("op", ["apply", "agg"])
692
+ def test_apply_nested_result_axis_1(op):
693
+ # GH 13820
694
+ def apply_list(row):
695
+ return [2 * row["A"], 2 * row["C"], 2 * row["B"]]
696
+
697
+ df = DataFrame(np.zeros((4, 4)), columns=list("ABCD"))
698
+ result = getattr(df, op)(apply_list, axis=1)
699
+ expected = Series(
700
+ [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
701
+ )
702
+ tm.assert_series_equal(result, expected)
703
+
704
+
705
+ def test_apply_noreduction_tzaware_object():
706
+ # https://github.com/pandas-dev/pandas/issues/31505
707
+ expected = DataFrame(
708
+ {"foo": [Timestamp("2020", tz="UTC")]}, dtype="datetime64[ns, UTC]"
709
+ )
710
+ result = expected.apply(lambda x: x)
711
+ tm.assert_frame_equal(result, expected)
712
+ result = expected.apply(lambda x: x.copy())
713
+ tm.assert_frame_equal(result, expected)
714
+
715
+
716
+ def test_apply_function_runs_once():
717
+ # https://github.com/pandas-dev/pandas/issues/30815
718
+
719
+ df = DataFrame({"a": [1, 2, 3]})
720
+ names = [] # Save row names function is applied to
721
+
722
+ def reducing_function(row):
723
+ names.append(row.name)
724
+
725
+ def non_reducing_function(row):
726
+ names.append(row.name)
727
+ return row
728
+
729
+ for func in [reducing_function, non_reducing_function]:
730
+ del names[:]
731
+
732
+ df.apply(func, axis=1)
733
+ assert names == list(df.index)
734
+
735
+
736
+ def test_apply_raw_function_runs_once():
737
+ # https://github.com/pandas-dev/pandas/issues/34506
738
+
739
+ df = DataFrame({"a": [1, 2, 3]})
740
+ values = [] # Save row values function is applied to
741
+
742
+ def reducing_function(row):
743
+ values.extend(row)
744
+
745
+ def non_reducing_function(row):
746
+ values.extend(row)
747
+ return row
748
+
749
+ for func in [reducing_function, non_reducing_function]:
750
+ del values[:]
751
+
752
+ df.apply(func, raw=True, axis=1)
753
+ assert values == list(df.a.to_list())
754
+
755
+
756
+ def test_applymap_function_runs_once():
757
+ df = DataFrame({"a": [1, 2, 3]})
758
+ values = [] # Save values function is applied to
759
+
760
+ def reducing_function(val):
761
+ values.append(val)
762
+
763
+ def non_reducing_function(val):
764
+ values.append(val)
765
+ return val
766
+
767
+ for func in [reducing_function, non_reducing_function]:
768
+ del values[:]
769
+
770
+ df.applymap(func)
771
+ assert values == df.a.to_list()
772
+
773
+
774
+ def test_apply_with_byte_string():
775
+ # GH 34529
776
+ df = DataFrame(np.array([b"abcd", b"efgh"]), columns=["col"])
777
+ expected = DataFrame(np.array([b"abcd", b"efgh"]), columns=["col"], dtype=object)
778
+ # After we make the apply we expect a dataframe just
779
+ # like the original but with the object datatype
780
+ result = df.apply(lambda x: x.astype("object"))
781
+ tm.assert_frame_equal(result, expected)
782
+
783
+
784
+ @pytest.mark.parametrize("val", ["asd", 12, None, np.NaN])
785
+ def test_apply_category_equalness(val):
786
+ # Check if categorical comparisons on apply, GH 21239
787
+ df_values = ["asd", None, 12, "asd", "cde", np.NaN]
788
+ df = DataFrame({"a": df_values}, dtype="category")
789
+
790
+ result = df.a.apply(lambda x: x == val)
791
+ expected = Series(
792
+ [np.NaN if pd.isnull(x) else x == val for x in df_values], name="a"
793
+ )
794
+ tm.assert_series_equal(result, expected)
795
+
796
+
797
+ # the user has supplied an opaque UDF where
798
+ # they are transforming the input that requires
799
+ # us to infer the output
800
+
801
+
802
+ def test_infer_row_shape():
803
+ # GH 17437
804
+ # if row shape is changing, infer it
805
+ df = DataFrame(np.random.rand(10, 2))
806
+ result = df.apply(np.fft.fft, axis=0).shape
807
+ assert result == (10, 2)
808
+
809
+ result = df.apply(np.fft.rfft, axis=0).shape
810
+ assert result == (6, 2)
811
+
812
+
813
+ def test_with_dictlike_columns():
814
+ # GH 17602
815
+ df = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
816
+ result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1)
817
+ expected = Series([{"s": 3} for t in df.itertuples()])
818
+ tm.assert_series_equal(result, expected)
819
+
820
+ df["tm"] = [
821
+ Timestamp("2017-05-01 00:00:00"),
822
+ Timestamp("2017-05-02 00:00:00"),
823
+ ]
824
+ result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1)
825
+ tm.assert_series_equal(result, expected)
826
+
827
+ # compose a series
828
+ result = (df["a"] + df["b"]).apply(lambda x: {"s": x})
829
+ expected = Series([{"s": 3}, {"s": 3}])
830
+ tm.assert_series_equal(result, expected)
831
+
832
+
833
+ def test_with_dictlike_columns_with_datetime():
834
+ # GH 18775
835
+ df = DataFrame()
836
+ df["author"] = ["X", "Y", "Z"]
837
+ df["publisher"] = ["BBC", "NBC", "N24"]
838
+ df["date"] = pd.to_datetime(
839
+ ["17-10-2010 07:15:30", "13-05-2011 08:20:35", "15-01-2013 09:09:09"],
840
+ dayfirst=True,
841
+ )
842
+ result = df.apply(lambda x: {}, axis=1)
843
+ expected = Series([{}, {}, {}])
844
+ tm.assert_series_equal(result, expected)
845
+
846
+
847
+ def test_with_dictlike_columns_with_infer():
848
+ # GH 17602
849
+ df = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
850
+ result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1, result_type="expand")
851
+ expected = DataFrame({"s": [3, 3]})
852
+ tm.assert_frame_equal(result, expected)
853
+
854
+ df["tm"] = [
855
+ Timestamp("2017-05-01 00:00:00"),
856
+ Timestamp("2017-05-02 00:00:00"),
857
+ ]
858
+ result = df.apply(lambda x: {"s": x["a"] + x["b"]}, axis=1, result_type="expand")
859
+ tm.assert_frame_equal(result, expected)
860
+
861
+
862
+ def test_with_listlike_columns():
863
+ # GH 17348
864
+ df = DataFrame(
865
+ {
866
+ "a": Series(np.random.randn(4)),
867
+ "b": ["a", "list", "of", "words"],
868
+ "ts": date_range("2016-10-01", periods=4, freq="H"),
869
+ }
870
+ )
871
+
872
+ result = df[["a", "b"]].apply(tuple, axis=1)
873
+ expected = Series([t[1:] for t in df[["a", "b"]].itertuples()])
874
+ tm.assert_series_equal(result, expected)
875
+
876
+ result = df[["a", "ts"]].apply(tuple, axis=1)
877
+ expected = Series([t[1:] for t in df[["a", "ts"]].itertuples()])
878
+ tm.assert_series_equal(result, expected)
879
+
880
+
881
+ def test_with_listlike_columns_returning_list():
882
+ # GH 18919
883
+ df = DataFrame({"x": Series([["a", "b"], ["q"]]), "y": Series([["z"], ["q", "t"]])})
884
+ df.index = MultiIndex.from_tuples([("i0", "j0"), ("i1", "j1")])
885
+
886
+ result = df.apply(lambda row: [el for el in row["x"] if el in row["y"]], axis=1)
887
+ expected = Series([[], ["q"]], index=df.index)
888
+ tm.assert_series_equal(result, expected)
889
+
890
+
891
+ def test_infer_output_shape_columns():
892
+ # GH 18573
893
+
894
+ df = DataFrame(
895
+ {
896
+ "number": [1.0, 2.0],
897
+ "string": ["foo", "bar"],
898
+ "datetime": [
899
+ Timestamp("2017-11-29 03:30:00"),
900
+ Timestamp("2017-11-29 03:45:00"),
901
+ ],
902
+ }
903
+ )
904
+ result = df.apply(lambda row: (row.number, row.string), axis=1)
905
+ expected = Series([(t.number, t.string) for t in df.itertuples()])
906
+ tm.assert_series_equal(result, expected)
907
+
908
+
909
+ def test_infer_output_shape_listlike_columns():
910
+ # GH 16353
911
+
912
+ df = DataFrame(np.random.randn(6, 3), columns=["A", "B", "C"])
913
+
914
+ result = df.apply(lambda x: [1, 2, 3], axis=1)
915
+ expected = Series([[1, 2, 3] for t in df.itertuples()])
916
+ tm.assert_series_equal(result, expected)
917
+
918
+ result = df.apply(lambda x: [1, 2], axis=1)
919
+ expected = Series([[1, 2] for t in df.itertuples()])
920
+ tm.assert_series_equal(result, expected)
921
+
922
+
923
+ @pytest.mark.parametrize("val", [1, 2])
924
+ def test_infer_output_shape_listlike_columns_np_func(val):
925
+ # GH 17970
926
+ df = DataFrame({"a": [1, 2, 3]}, index=list("abc"))
927
+
928
+ result = df.apply(lambda row: np.ones(val), axis=1)
929
+ expected = Series([np.ones(val) for t in df.itertuples()], index=df.index)
930
+ tm.assert_series_equal(result, expected)
931
+
932
+
933
+ def test_infer_output_shape_listlike_columns_with_timestamp():
934
+ # GH 17892
935
+ df = DataFrame(
936
+ {
937
+ "a": [
938
+ Timestamp("2010-02-01"),
939
+ Timestamp("2010-02-04"),
940
+ Timestamp("2010-02-05"),
941
+ Timestamp("2010-02-06"),
942
+ ],
943
+ "b": [9, 5, 4, 3],
944
+ "c": [5, 3, 4, 2],
945
+ "d": [1, 2, 3, 4],
946
+ }
947
+ )
948
+
949
+ def fun(x):
950
+ return (1, 2)
951
+
952
+ result = df.apply(fun, axis=1)
953
+ expected = Series([(1, 2) for t in df.itertuples()])
954
+ tm.assert_series_equal(result, expected)
955
+
956
+
957
+ @pytest.mark.parametrize("lst", [[1, 2, 3], [1, 2]])
958
+ def test_consistent_coerce_for_shapes(lst):
959
+ # we want column names to NOT be propagated
960
+ # just because the shape matches the input shape
961
+ df = DataFrame(np.random.randn(4, 3), columns=["A", "B", "C"])
962
+
963
+ result = df.apply(lambda x: lst, axis=1)
964
+ expected = Series([lst for t in df.itertuples()])
965
+ tm.assert_series_equal(result, expected)
966
+
967
+
968
+ def test_consistent_names(int_frame_const_col):
969
+ # if a Series is returned, we should use the resulting index names
970
+ df = int_frame_const_col
971
+
972
+ result = df.apply(
973
+ lambda x: Series([1, 2, 3], index=["test", "other", "cols"]), axis=1
974
+ )
975
+ expected = int_frame_const_col.rename(
976
+ columns={"A": "test", "B": "other", "C": "cols"}
977
+ )
978
+ tm.assert_frame_equal(result, expected)
979
+
980
+ result = df.apply(lambda x: Series([1, 2], index=["test", "other"]), axis=1)
981
+ expected = expected[["test", "other"]]
982
+ tm.assert_frame_equal(result, expected)
983
+
984
+
985
+ def test_result_type(int_frame_const_col):
986
+ # result_type should be consistent no matter which
987
+ # path we take in the code
988
+ df = int_frame_const_col
989
+
990
+ result = df.apply(lambda x: [1, 2, 3], axis=1, result_type="expand")
991
+ expected = df.copy()
992
+ expected.columns = [0, 1, 2]
993
+ tm.assert_frame_equal(result, expected)
994
+
995
+
996
+ def test_result_type_shorter_list(int_frame_const_col):
997
+ # result_type should be consistent no matter which
998
+ # path we take in the code
999
+ df = int_frame_const_col
1000
+ result = df.apply(lambda x: [1, 2], axis=1, result_type="expand")
1001
+ expected = df[["A", "B"]].copy()
1002
+ expected.columns = [0, 1]
1003
+ tm.assert_frame_equal(result, expected)
1004
+
1005
+
1006
+ def test_result_type_broadcast(int_frame_const_col):
1007
+ # result_type should be consistent no matter which
1008
+ # path we take in the code
1009
+ df = int_frame_const_col
1010
+ # broadcast result
1011
+ result = df.apply(lambda x: [1, 2, 3], axis=1, result_type="broadcast")
1012
+ expected = df.copy()
1013
+ tm.assert_frame_equal(result, expected)
1014
+
1015
+
1016
+ def test_result_type_broadcast_series_func(int_frame_const_col):
1017
+ # result_type should be consistent no matter which
1018
+ # path we take in the code
1019
+ df = int_frame_const_col
1020
+ columns = ["other", "col", "names"]
1021
+ result = df.apply(
1022
+ lambda x: Series([1, 2, 3], index=columns), axis=1, result_type="broadcast"
1023
+ )
1024
+ expected = df.copy()
1025
+ tm.assert_frame_equal(result, expected)
1026
+
1027
+
1028
+ def test_result_type_series_result(int_frame_const_col):
1029
+ # result_type should be consistent no matter which
1030
+ # path we take in the code
1031
+ df = int_frame_const_col
1032
+ # series result
1033
+ result = df.apply(lambda x: Series([1, 2, 3], index=x.index), axis=1)
1034
+ expected = df.copy()
1035
+ tm.assert_frame_equal(result, expected)
1036
+
1037
+
1038
+ def test_result_type_series_result_other_index(int_frame_const_col):
1039
+ # result_type should be consistent no matter which
1040
+ # path we take in the code
1041
+ df = int_frame_const_col
1042
+ # series result with other index
1043
+ columns = ["other", "col", "names"]
1044
+ result = df.apply(lambda x: Series([1, 2, 3], index=columns), axis=1)
1045
+ expected = df.copy()
1046
+ expected.columns = columns
1047
+ tm.assert_frame_equal(result, expected)
1048
+
1049
+
1050
+ @pytest.mark.parametrize(
1051
+ "box",
1052
+ [lambda x: list(x), lambda x: tuple(x), lambda x: np.array(x, dtype="int64")],
1053
+ ids=["list", "tuple", "array"],
1054
+ )
1055
+ def test_consistency_for_boxed(box, int_frame_const_col):
1056
+ # passing an array or list should not affect the output shape
1057
+ df = int_frame_const_col
1058
+
1059
+ result = df.apply(lambda x: box([1, 2]), axis=1)
1060
+ expected = Series([box([1, 2]) for t in df.itertuples()])
1061
+ tm.assert_series_equal(result, expected)
1062
+
1063
+ result = df.apply(lambda x: box([1, 2]), axis=1, result_type="expand")
1064
+ expected = int_frame_const_col[["A", "B"]].rename(columns={"A": 0, "B": 1})
1065
+ tm.assert_frame_equal(result, expected)
1066
+
1067
+
1068
+ def test_agg_transform(axis, float_frame):
1069
+ other_axis = 1 if axis in {0, "index"} else 0
1070
+
1071
+ with np.errstate(all="ignore"):
1072
+ f_abs = np.abs(float_frame)
1073
+ f_sqrt = np.sqrt(float_frame)
1074
+
1075
+ # ufunc
1076
+ expected = f_sqrt.copy()
1077
+ result = float_frame.apply(np.sqrt, axis=axis)
1078
+ tm.assert_frame_equal(result, expected)
1079
+
1080
+ # list-like
1081
+ result = float_frame.apply([np.sqrt], axis=axis)
1082
+ expected = f_sqrt.copy()
1083
+ if axis in {0, "index"}:
1084
+ expected.columns = MultiIndex.from_product([float_frame.columns, ["sqrt"]])
1085
+ else:
1086
+ expected.index = MultiIndex.from_product([float_frame.index, ["sqrt"]])
1087
+ tm.assert_frame_equal(result, expected)
1088
+
1089
+ # multiple items in list
1090
+ # these are in the order as if we are applying both
1091
+ # functions per series and then concatting
1092
+ result = float_frame.apply([np.abs, np.sqrt], axis=axis)
1093
+ expected = zip_frames([f_abs, f_sqrt], axis=other_axis)
1094
+ if axis in {0, "index"}:
1095
+ expected.columns = MultiIndex.from_product(
1096
+ [float_frame.columns, ["absolute", "sqrt"]]
1097
+ )
1098
+ else:
1099
+ expected.index = MultiIndex.from_product(
1100
+ [float_frame.index, ["absolute", "sqrt"]]
1101
+ )
1102
+ tm.assert_frame_equal(result, expected)
1103
+
1104
+
1105
+ def test_demo():
1106
+ # demonstration tests
1107
+ df = DataFrame({"A": range(5), "B": 5})
1108
+
1109
+ result = df.agg(["min", "max"])
1110
+ expected = DataFrame(
1111
+ {"A": [0, 4], "B": [5, 5]}, columns=["A", "B"], index=["min", "max"]
1112
+ )
1113
+ tm.assert_frame_equal(result, expected)
1114
+
1115
+
1116
+ def test_demo_dict_agg():
1117
+ # demonstration tests
1118
+ df = DataFrame({"A": range(5), "B": 5})
1119
+ result = df.agg({"A": ["min", "max"], "B": ["sum", "max"]})
1120
+ expected = DataFrame(
1121
+ {"A": [4.0, 0.0, np.nan], "B": [5.0, np.nan, 25.0]},
1122
+ columns=["A", "B"],
1123
+ index=["max", "min", "sum"],
1124
+ )
1125
+ tm.assert_frame_equal(result.reindex_like(expected), expected)
1126
+
1127
+
1128
+ def test_agg_with_name_as_column_name():
1129
+ # GH 36212 - Column name is "name"
1130
+ data = {"name": ["foo", "bar"]}
1131
+ df = DataFrame(data)
1132
+
1133
+ # result's name should be None
1134
+ result = df.agg({"name": "count"})
1135
+ expected = Series({"name": 2})
1136
+ tm.assert_series_equal(result, expected)
1137
+
1138
+ # Check if name is still preserved when aggregating series instead
1139
+ result = df["name"].agg({"name": "count"})
1140
+ expected = Series({"name": 2}, name="name")
1141
+ tm.assert_series_equal(result, expected)
1142
+
1143
+
1144
+ def test_agg_multiple_mixed():
1145
+ # GH 20909
1146
+ mdf = DataFrame(
1147
+ {
1148
+ "A": [1, 2, 3],
1149
+ "B": [1.0, 2.0, 3.0],
1150
+ "C": ["foo", "bar", "baz"],
1151
+ }
1152
+ )
1153
+ expected = DataFrame(
1154
+ {
1155
+ "A": [1, 6],
1156
+ "B": [1.0, 6.0],
1157
+ "C": ["bar", "foobarbaz"],
1158
+ },
1159
+ index=["min", "sum"],
1160
+ )
1161
+ # sorted index
1162
+ result = mdf.agg(["min", "sum"])
1163
+ tm.assert_frame_equal(result, expected)
1164
+
1165
+ result = mdf[["C", "B", "A"]].agg(["sum", "min"])
1166
+ # GH40420: the result of .agg should have an index that is sorted
1167
+ # according to the arguments provided to agg.
1168
+ expected = expected[["C", "B", "A"]].reindex(["sum", "min"])
1169
+ tm.assert_frame_equal(result, expected)
1170
+
1171
+
1172
+ def test_agg_multiple_mixed_raises():
1173
+ # GH 20909
1174
+ mdf = DataFrame(
1175
+ {
1176
+ "A": [1, 2, 3],
1177
+ "B": [1.0, 2.0, 3.0],
1178
+ "C": ["foo", "bar", "baz"],
1179
+ "D": date_range("20130101", periods=3),
1180
+ }
1181
+ )
1182
+
1183
+ # sorted index
1184
+ msg = "does not support reduction"
1185
+ with pytest.raises(TypeError, match=msg):
1186
+ mdf.agg(["min", "sum"])
1187
+
1188
+ with pytest.raises(TypeError, match=msg):
1189
+ mdf[["D", "C", "B", "A"]].agg(["sum", "min"])
1190
+
1191
+
1192
+ def test_agg_reduce(axis, float_frame):
1193
+ other_axis = 1 if axis in {0, "index"} else 0
1194
+ name1, name2 = float_frame.axes[other_axis].unique()[:2].sort_values()
1195
+
1196
+ # all reducers
1197
+ expected = pd.concat(
1198
+ [
1199
+ float_frame.mean(axis=axis),
1200
+ float_frame.max(axis=axis),
1201
+ float_frame.sum(axis=axis),
1202
+ ],
1203
+ axis=1,
1204
+ )
1205
+ expected.columns = ["mean", "max", "sum"]
1206
+ expected = expected.T if axis in {0, "index"} else expected
1207
+
1208
+ result = float_frame.agg(["mean", "max", "sum"], axis=axis)
1209
+ tm.assert_frame_equal(result, expected)
1210
+
1211
+ # dict input with scalars
1212
+ func = {name1: "mean", name2: "sum"}
1213
+ result = float_frame.agg(func, axis=axis)
1214
+ expected = Series(
1215
+ [
1216
+ float_frame.loc(other_axis)[name1].mean(),
1217
+ float_frame.loc(other_axis)[name2].sum(),
1218
+ ],
1219
+ index=[name1, name2],
1220
+ )
1221
+ tm.assert_series_equal(result, expected)
1222
+
1223
+ # dict input with lists
1224
+ func = {name1: ["mean"], name2: ["sum"]}
1225
+ result = float_frame.agg(func, axis=axis)
1226
+ expected = DataFrame(
1227
+ {
1228
+ name1: Series([float_frame.loc(other_axis)[name1].mean()], index=["mean"]),
1229
+ name2: Series([float_frame.loc(other_axis)[name2].sum()], index=["sum"]),
1230
+ }
1231
+ )
1232
+ expected = expected.T if axis in {1, "columns"} else expected
1233
+ tm.assert_frame_equal(result, expected)
1234
+
1235
+ # dict input with lists with multiple
1236
+ func = {name1: ["mean", "sum"], name2: ["sum", "max"]}
1237
+ result = float_frame.agg(func, axis=axis)
1238
+ expected = pd.concat(
1239
+ {
1240
+ name1: Series(
1241
+ [
1242
+ float_frame.loc(other_axis)[name1].mean(),
1243
+ float_frame.loc(other_axis)[name1].sum(),
1244
+ ],
1245
+ index=["mean", "sum"],
1246
+ ),
1247
+ name2: Series(
1248
+ [
1249
+ float_frame.loc(other_axis)[name2].sum(),
1250
+ float_frame.loc(other_axis)[name2].max(),
1251
+ ],
1252
+ index=["sum", "max"],
1253
+ ),
1254
+ },
1255
+ axis=1,
1256
+ )
1257
+ expected = expected.T if axis in {1, "columns"} else expected
1258
+ tm.assert_frame_equal(result, expected)
1259
+
1260
+
1261
+ def test_nuiscance_columns():
1262
+ # GH 15015
1263
+ df = DataFrame(
1264
+ {
1265
+ "A": [1, 2, 3],
1266
+ "B": [1.0, 2.0, 3.0],
1267
+ "C": ["foo", "bar", "baz"],
1268
+ "D": date_range("20130101", periods=3),
1269
+ }
1270
+ )
1271
+
1272
+ result = df.agg("min")
1273
+ expected = Series([1, 1.0, "bar", Timestamp("20130101")], index=df.columns)
1274
+ tm.assert_series_equal(result, expected)
1275
+
1276
+ result = df.agg(["min"])
1277
+ expected = DataFrame(
1278
+ [[1, 1.0, "bar", Timestamp("20130101")]],
1279
+ index=["min"],
1280
+ columns=df.columns,
1281
+ )
1282
+ tm.assert_frame_equal(result, expected)
1283
+
1284
+ msg = "does not support reduction"
1285
+ with pytest.raises(TypeError, match=msg):
1286
+ df.agg("sum")
1287
+
1288
+ result = df[["A", "B", "C"]].agg("sum")
1289
+ expected = Series([6, 6.0, "foobarbaz"], index=["A", "B", "C"])
1290
+ tm.assert_series_equal(result, expected)
1291
+
1292
+ msg = "does not support reduction"
1293
+ with pytest.raises(TypeError, match=msg):
1294
+ df.agg(["sum"])
1295
+
1296
+
1297
+ @pytest.mark.parametrize("how", ["agg", "apply"])
1298
+ def test_non_callable_aggregates(how):
1299
+ # GH 16405
1300
+ # 'size' is a property of frame/series
1301
+ # validate that this is working
1302
+ # GH 39116 - expand to apply
1303
+ df = DataFrame(
1304
+ {"A": [None, 2, 3], "B": [1.0, np.nan, 3.0], "C": ["foo", None, "bar"]}
1305
+ )
1306
+
1307
+ # Function aggregate
1308
+ result = getattr(df, how)({"A": "count"})
1309
+ expected = Series({"A": 2})
1310
+
1311
+ tm.assert_series_equal(result, expected)
1312
+
1313
+ # Non-function aggregate
1314
+ result = getattr(df, how)({"A": "size"})
1315
+ expected = Series({"A": 3})
1316
+
1317
+ tm.assert_series_equal(result, expected)
1318
+
1319
+ # Mix function and non-function aggs
1320
+ result1 = getattr(df, how)(["count", "size"])
1321
+ result2 = getattr(df, how)(
1322
+ {"A": ["count", "size"], "B": ["count", "size"], "C": ["count", "size"]}
1323
+ )
1324
+ expected = DataFrame(
1325
+ {
1326
+ "A": {"count": 2, "size": 3},
1327
+ "B": {"count": 2, "size": 3},
1328
+ "C": {"count": 2, "size": 3},
1329
+ }
1330
+ )
1331
+
1332
+ tm.assert_frame_equal(result1, result2, check_like=True)
1333
+ tm.assert_frame_equal(result2, expected, check_like=True)
1334
+
1335
+ # Just functional string arg is same as calling df.arg()
1336
+ result = getattr(df, how)("count")
1337
+ expected = df.count()
1338
+
1339
+ tm.assert_series_equal(result, expected)
1340
+
1341
+
1342
+ @pytest.mark.parametrize("how", ["agg", "apply"])
1343
+ def test_size_as_str(how, axis):
1344
+ # GH 39934
1345
+ df = DataFrame(
1346
+ {"A": [None, 2, 3], "B": [1.0, np.nan, 3.0], "C": ["foo", None, "bar"]}
1347
+ )
1348
+ # Just a string attribute arg same as calling df.arg
1349
+ # on the columns
1350
+ result = getattr(df, how)("size", axis=axis)
1351
+ if axis in (0, "index"):
1352
+ expected = Series(df.shape[0], index=df.columns)
1353
+ else:
1354
+ expected = Series(df.shape[1], index=df.index)
1355
+ tm.assert_series_equal(result, expected)
1356
+
1357
+
1358
+ def test_agg_listlike_result():
1359
+ # GH-29587 user defined function returning list-likes
1360
+ df = DataFrame({"A": [2, 2, 3], "B": [1.5, np.nan, 1.5], "C": ["foo", None, "bar"]})
1361
+
1362
+ def func(group_col):
1363
+ return list(group_col.dropna().unique())
1364
+
1365
+ result = df.agg(func)
1366
+ expected = Series([[2, 3], [1.5], ["foo", "bar"]], index=["A", "B", "C"])
1367
+ tm.assert_series_equal(result, expected)
1368
+
1369
+ result = df.agg([func])
1370
+ expected = expected.to_frame("func").T
1371
+ tm.assert_frame_equal(result, expected)
1372
+
1373
+
1374
+ @pytest.mark.parametrize("axis", [0, 1])
1375
+ @pytest.mark.parametrize(
1376
+ "args, kwargs",
1377
+ [
1378
+ ((1, 2, 3), {}),
1379
+ ((8, 7, 15), {}),
1380
+ ((1, 2), {}),
1381
+ ((1,), {"b": 2}),
1382
+ ((), {"a": 1, "b": 2}),
1383
+ ((), {"a": 2, "b": 1}),
1384
+ ((), {"a": 1, "b": 2, "c": 3}),
1385
+ ],
1386
+ )
1387
+ def test_agg_args_kwargs(axis, args, kwargs):
1388
+ def f(x, a, b, c=3):
1389
+ return x.sum() + (a + b) / c
1390
+
1391
+ df = DataFrame([[1, 2], [3, 4]])
1392
+
1393
+ if axis == 0:
1394
+ expected = Series([5.0, 7.0])
1395
+ else:
1396
+ expected = Series([4.0, 8.0])
1397
+
1398
+ result = df.agg(f, axis, *args, **kwargs)
1399
+
1400
+ tm.assert_series_equal(result, expected)
1401
+
1402
+
1403
+ @pytest.mark.parametrize("num_cols", [2, 3, 5])
1404
+ def test_frequency_is_original(num_cols):
1405
+ # GH 22150
1406
+ index = pd.DatetimeIndex(["1950-06-30", "1952-10-24", "1953-05-29"])
1407
+ original = index.copy()
1408
+ df = DataFrame(1, index=index, columns=range(num_cols))
1409
+ df.apply(lambda x: x)
1410
+ assert index.freq == original.freq
1411
+
1412
+
1413
+ def test_apply_datetime_tz_issue():
1414
+ # GH 29052
1415
+
1416
+ timestamps = [
1417
+ Timestamp("2019-03-15 12:34:31.909000+0000", tz="UTC"),
1418
+ Timestamp("2019-03-15 12:34:34.359000+0000", tz="UTC"),
1419
+ Timestamp("2019-03-15 12:34:34.660000+0000", tz="UTC"),
1420
+ ]
1421
+ df = DataFrame(data=[0, 1, 2], index=timestamps)
1422
+ result = df.apply(lambda x: x.name, axis=1)
1423
+ expected = Series(index=timestamps, data=timestamps)
1424
+
1425
+ tm.assert_series_equal(result, expected)
1426
+
1427
+
1428
+ @pytest.mark.parametrize("df", [DataFrame({"A": ["a", None], "B": ["c", "d"]})])
1429
+ @pytest.mark.parametrize("method", ["min", "max", "sum"])
1430
+ def test_mixed_column_raises(df, method):
1431
+ # GH 16832
1432
+ if method == "sum":
1433
+ msg = r'can only concatenate str \(not "int"\) to str'
1434
+ else:
1435
+ msg = "not supported between instances of 'str' and 'float'"
1436
+ with pytest.raises(TypeError, match=msg):
1437
+ getattr(df, method)()
1438
+
1439
+
1440
+ @pytest.mark.parametrize("col", [1, 1.0, True, "a", np.nan])
1441
+ def test_apply_dtype(col):
1442
+ # GH 31466
1443
+ df = DataFrame([[1.0, col]], columns=["a", "b"])
1444
+ result = df.apply(lambda x: x.dtype)
1445
+ expected = df.dtypes
1446
+
1447
+ tm.assert_series_equal(result, expected)
1448
+
1449
+
1450
+ def test_apply_mutating(using_array_manager, using_copy_on_write):
1451
+ # GH#35462 case where applied func pins a new BlockManager to a row
1452
+ df = DataFrame({"a": range(100), "b": range(100, 200)})
1453
+ df_orig = df.copy()
1454
+
1455
+ def func(row):
1456
+ mgr = row._mgr
1457
+ row.loc["a"] += 1
1458
+ assert row._mgr is not mgr
1459
+ return row
1460
+
1461
+ expected = df.copy()
1462
+ expected["a"] += 1
1463
+
1464
+ result = df.apply(func, axis=1)
1465
+
1466
+ tm.assert_frame_equal(result, expected)
1467
+ if using_copy_on_write or using_array_manager:
1468
+ # INFO(CoW) With copy on write, mutating a viewing row doesn't mutate the parent
1469
+ # INFO(ArrayManager) With BlockManager, the row is a view and mutated in place,
1470
+ # with ArrayManager the row is not a view, and thus not mutated in place
1471
+ tm.assert_frame_equal(df, df_orig)
1472
+ else:
1473
+ tm.assert_frame_equal(df, result)
1474
+
1475
+
1476
+ def test_apply_empty_list_reduce():
1477
+ # GH#35683 get columns correct
1478
+ df = DataFrame([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]], columns=["a", "b"])
1479
+
1480
+ result = df.apply(lambda x: [], result_type="reduce")
1481
+ expected = Series({"a": [], "b": []}, dtype=object)
1482
+ tm.assert_series_equal(result, expected)
1483
+
1484
+
1485
+ def test_apply_no_suffix_index():
1486
+ # GH36189
1487
+ pdf = DataFrame([[4, 9]] * 3, columns=["A", "B"])
1488
+ result = pdf.apply(["sum", lambda x: x.sum(), lambda x: x.sum()])
1489
+ expected = DataFrame(
1490
+ {"A": [12, 12, 12], "B": [27, 27, 27]}, index=["sum", "<lambda>", "<lambda>"]
1491
+ )
1492
+
1493
+ tm.assert_frame_equal(result, expected)
1494
+
1495
+
1496
+ def test_apply_raw_returns_string():
1497
+ # https://github.com/pandas-dev/pandas/issues/35940
1498
+ df = DataFrame({"A": ["aa", "bbb"]})
1499
+ result = df.apply(lambda x: x[0], axis=1, raw=True)
1500
+ expected = Series(["aa", "bbb"])
1501
+ tm.assert_series_equal(result, expected)
1502
+
1503
+
1504
+ def test_aggregation_func_column_order():
1505
+ # GH40420: the result of .agg should have an index that is sorted
1506
+ # according to the arguments provided to agg.
1507
+ df = DataFrame(
1508
+ [
1509
+ (1, 0, 0),
1510
+ (2, 0, 0),
1511
+ (3, 0, 0),
1512
+ (4, 5, 4),
1513
+ (5, 6, 6),
1514
+ (6, 7, 7),
1515
+ ],
1516
+ columns=("att1", "att2", "att3"),
1517
+ )
1518
+
1519
+ def sum_div2(s):
1520
+ return s.sum() / 2
1521
+
1522
+ aggs = ["sum", sum_div2, "count", "min"]
1523
+ result = df.agg(aggs)
1524
+ expected = DataFrame(
1525
+ {
1526
+ "att1": [21.0, 10.5, 6.0, 1.0],
1527
+ "att2": [18.0, 9.0, 6.0, 0.0],
1528
+ "att3": [17.0, 8.5, 6.0, 0.0],
1529
+ },
1530
+ index=["sum", "sum_div2", "count", "min"],
1531
+ )
1532
+ tm.assert_frame_equal(result, expected)
1533
+
1534
+
1535
+ def test_apply_getitem_axis_1():
1536
+ # GH 13427
1537
+ df = DataFrame({"a": [0, 1, 2], "b": [1, 2, 3]})
1538
+ result = df[["a", "a"]].apply(lambda x: x[0] + x[1], axis=1)
1539
+ expected = Series([0, 2, 4])
1540
+ tm.assert_series_equal(result, expected)
1541
+
1542
+
1543
+ def test_nuisance_depr_passes_through_warnings():
1544
+ # GH 43740
1545
+ # DataFrame.agg with list-likes may emit warnings for both individual
1546
+ # args and for entire columns, but we only want to emit once. We
1547
+ # catch and suppress the warnings for individual args, but need to make
1548
+ # sure if some other warnings were raised, they get passed through to
1549
+ # the user.
1550
+
1551
+ def expected_warning(x):
1552
+ warnings.warn("Hello, World!")
1553
+ return x.sum()
1554
+
1555
+ df = DataFrame({"a": [1, 2, 3]})
1556
+ with tm.assert_produces_warning(UserWarning, match="Hello, World!"):
1557
+ df.agg([expected_warning])
1558
+
1559
+
1560
+ def test_apply_type():
1561
+ # GH 46719
1562
+ df = DataFrame(
1563
+ {"col1": [3, "string", float], "col2": [0.25, datetime(2020, 1, 1), np.nan]},
1564
+ index=["a", "b", "c"],
1565
+ )
1566
+
1567
+ # applymap
1568
+ result = df.applymap(type)
1569
+ expected = DataFrame(
1570
+ {"col1": [int, str, type], "col2": [float, datetime, float]},
1571
+ index=["a", "b", "c"],
1572
+ )
1573
+ tm.assert_frame_equal(result, expected)
1574
+
1575
+ # axis=0
1576
+ result = df.apply(type, axis=0)
1577
+ expected = Series({"col1": Series, "col2": Series})
1578
+ tm.assert_series_equal(result, expected)
1579
+
1580
+ # axis=1
1581
+ result = df.apply(type, axis=1)
1582
+ expected = Series({"a": Series, "b": Series, "c": Series})
1583
+ tm.assert_series_equal(result, expected)
1584
+
1585
+
1586
+ def test_apply_on_empty_dataframe():
1587
+ # GH 39111
1588
+ df = DataFrame({"a": [1, 2], "b": [3, 0]})
1589
+ result = df.head(0).apply(lambda x: max(x["a"], x["b"]), axis=1)
1590
+ expected = Series([], dtype=np.float64)
1591
+ tm.assert_series_equal(result, expected)
1592
+
1593
+
1594
+ @pytest.mark.parametrize(
1595
+ "test, constant",
1596
+ [
1597
+ ({"a": [1, 2, 3], "b": [1, 1, 1]}, {"a": [1, 2, 3], "b": [1]}),
1598
+ ({"a": [2, 2, 2], "b": [1, 1, 1]}, {"a": [2], "b": [1]}),
1599
+ ],
1600
+ )
1601
+ def test_unique_agg_type_is_series(test, constant):
1602
+ # GH#22558
1603
+ df1 = DataFrame(test)
1604
+ expected = Series(data=constant, index=["a", "b"], dtype="object")
1605
+ aggregation = {"a": "unique", "b": "unique"}
1606
+
1607
+ result = df1.agg(aggregation)
1608
+
1609
+ tm.assert_series_equal(result, expected)
1610
+
1611
+
1612
+ def test_any_apply_keyword_non_zero_axis_regression():
1613
+ # https://github.com/pandas-dev/pandas/issues/48656
1614
+ df = DataFrame({"A": [1, 2, 0], "B": [0, 2, 0], "C": [0, 0, 0]})
1615
+ expected = Series([True, True, False])
1616
+ tm.assert_series_equal(df.any(axis=1), expected)
1617
+
1618
+ result = df.apply("any", axis=1)
1619
+ tm.assert_series_equal(result, expected)
1620
+
1621
+ result = df.apply("any", 1)
1622
+ tm.assert_series_equal(result, expected)
1623
+
1624
+
1625
+ def test_agg_list_like_func_with_args():
1626
+ # GH 50624
1627
+ df = DataFrame({"x": [1, 2, 3]})
1628
+
1629
+ def foo1(x, a=1, c=0):
1630
+ return x + a + c
1631
+
1632
+ def foo2(x, b=2, c=0):
1633
+ return x + b + c
1634
+
1635
+ msg = r"foo1\(\) got an unexpected keyword argument 'b'"
1636
+ with pytest.raises(TypeError, match=msg):
1637
+ df.agg([foo1, foo2], 0, 3, b=3, c=4)
1638
+
1639
+ result = df.agg([foo1, foo2], 0, 3, c=4)
1640
+ expected = DataFrame(
1641
+ [[8, 8], [9, 9], [10, 10]],
1642
+ columns=MultiIndex.from_tuples([("x", "foo1"), ("x", "foo2")]),
1643
+ )
1644
+ tm.assert_frame_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_frame_apply_relabeling.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.compat.numpy import np_version_gte1p25
5
+
6
+ import pandas as pd
7
+ import pandas._testing as tm
8
+
9
+
10
+ def test_agg_relabel():
11
+ # GH 26513
12
+ df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4], "C": [3, 4, 5, 6]})
13
+
14
+ # simplest case with one column, one func
15
+ result = df.agg(foo=("B", "sum"))
16
+ expected = pd.DataFrame({"B": [10]}, index=pd.Index(["foo"]))
17
+ tm.assert_frame_equal(result, expected)
18
+
19
+ # test on same column with different methods
20
+ result = df.agg(foo=("B", "sum"), bar=("B", "min"))
21
+ expected = pd.DataFrame({"B": [10, 1]}, index=pd.Index(["foo", "bar"]))
22
+
23
+ tm.assert_frame_equal(result, expected)
24
+
25
+
26
+ def test_agg_relabel_multi_columns_multi_methods():
27
+ # GH 26513, test on multiple columns with multiple methods
28
+ df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4], "C": [3, 4, 5, 6]})
29
+ result = df.agg(
30
+ foo=("A", "sum"),
31
+ bar=("B", "mean"),
32
+ cat=("A", "min"),
33
+ dat=("B", "max"),
34
+ f=("A", "max"),
35
+ g=("C", "min"),
36
+ )
37
+ expected = pd.DataFrame(
38
+ {
39
+ "A": [6.0, np.nan, 1.0, np.nan, 2.0, np.nan],
40
+ "B": [np.nan, 2.5, np.nan, 4.0, np.nan, np.nan],
41
+ "C": [np.nan, np.nan, np.nan, np.nan, np.nan, 3.0],
42
+ },
43
+ index=pd.Index(["foo", "bar", "cat", "dat", "f", "g"]),
44
+ )
45
+ tm.assert_frame_equal(result, expected)
46
+
47
+
48
+ @pytest.mark.xfail(np_version_gte1p25, reason="name of min now equals name of np.min")
49
+ def test_agg_relabel_partial_functions():
50
+ # GH 26513, test on partial, functools or more complex cases
51
+ df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4], "C": [3, 4, 5, 6]})
52
+ result = df.agg(foo=("A", np.mean), bar=("A", "mean"), cat=("A", min))
53
+ expected = pd.DataFrame(
54
+ {"A": [1.5, 1.5, 1.0]}, index=pd.Index(["foo", "bar", "cat"])
55
+ )
56
+ tm.assert_frame_equal(result, expected)
57
+
58
+ result = df.agg(
59
+ foo=("A", min),
60
+ bar=("A", np.min),
61
+ cat=("B", max),
62
+ dat=("C", "min"),
63
+ f=("B", np.sum),
64
+ kk=("B", lambda x: min(x)),
65
+ )
66
+ expected = pd.DataFrame(
67
+ {
68
+ "A": [1.0, 1.0, np.nan, np.nan, np.nan, np.nan],
69
+ "B": [np.nan, np.nan, 4.0, np.nan, 10.0, 1.0],
70
+ "C": [np.nan, np.nan, np.nan, 3.0, np.nan, np.nan],
71
+ },
72
+ index=pd.Index(["foo", "bar", "cat", "dat", "f", "kk"]),
73
+ )
74
+ tm.assert_frame_equal(result, expected)
75
+
76
+
77
+ def test_agg_namedtuple():
78
+ # GH 26513
79
+ df = pd.DataFrame({"A": [0, 1], "B": [1, 2]})
80
+ result = df.agg(
81
+ foo=pd.NamedAgg("B", "sum"),
82
+ bar=pd.NamedAgg("B", min),
83
+ cat=pd.NamedAgg(column="B", aggfunc="count"),
84
+ fft=pd.NamedAgg("B", aggfunc="max"),
85
+ )
86
+
87
+ expected = pd.DataFrame(
88
+ {"B": [3, 1, 2, 2]}, index=pd.Index(["foo", "bar", "cat", "fft"])
89
+ )
90
+ tm.assert_frame_equal(result, expected)
91
+
92
+ result = df.agg(
93
+ foo=pd.NamedAgg("A", "min"),
94
+ bar=pd.NamedAgg(column="B", aggfunc="max"),
95
+ cat=pd.NamedAgg(column="A", aggfunc="max"),
96
+ )
97
+ expected = pd.DataFrame(
98
+ {"A": [0.0, np.nan, 1.0], "B": [np.nan, 2.0, np.nan]},
99
+ index=pd.Index(["foo", "bar", "cat"]),
100
+ )
101
+ tm.assert_frame_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_frame_transform.py ADDED
@@ -0,0 +1,242 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DataFrame,
6
+ MultiIndex,
7
+ Series,
8
+ )
9
+ import pandas._testing as tm
10
+ from pandas.tests.apply.common import frame_transform_kernels
11
+ from pandas.tests.frame.common import zip_frames
12
+
13
+
14
+ def unpack_obj(obj, klass, axis):
15
+ """
16
+ Helper to ensure we have the right type of object for a test parametrized
17
+ over frame_or_series.
18
+ """
19
+ if klass is not DataFrame:
20
+ obj = obj["A"]
21
+ if axis != 0:
22
+ pytest.skip(f"Test is only for DataFrame with axis={axis}")
23
+ return obj
24
+
25
+
26
+ def test_transform_ufunc(axis, float_frame, frame_or_series):
27
+ # GH 35964
28
+ obj = unpack_obj(float_frame, frame_or_series, axis)
29
+
30
+ with np.errstate(all="ignore"):
31
+ f_sqrt = np.sqrt(obj)
32
+
33
+ # ufunc
34
+ result = obj.transform(np.sqrt, axis=axis)
35
+ expected = f_sqrt
36
+ tm.assert_equal(result, expected)
37
+
38
+
39
+ @pytest.mark.parametrize(
40
+ "ops, names",
41
+ [
42
+ ([np.sqrt], ["sqrt"]),
43
+ ([np.abs, np.sqrt], ["absolute", "sqrt"]),
44
+ (np.array([np.sqrt]), ["sqrt"]),
45
+ (np.array([np.abs, np.sqrt]), ["absolute", "sqrt"]),
46
+ ],
47
+ )
48
+ def test_transform_listlike(axis, float_frame, ops, names):
49
+ # GH 35964
50
+ other_axis = 1 if axis in {0, "index"} else 0
51
+ with np.errstate(all="ignore"):
52
+ expected = zip_frames([op(float_frame) for op in ops], axis=other_axis)
53
+ if axis in {0, "index"}:
54
+ expected.columns = MultiIndex.from_product([float_frame.columns, names])
55
+ else:
56
+ expected.index = MultiIndex.from_product([float_frame.index, names])
57
+ result = float_frame.transform(ops, axis=axis)
58
+ tm.assert_frame_equal(result, expected)
59
+
60
+
61
+ @pytest.mark.parametrize("ops", [[], np.array([])])
62
+ def test_transform_empty_listlike(float_frame, ops, frame_or_series):
63
+ obj = unpack_obj(float_frame, frame_or_series, 0)
64
+
65
+ with pytest.raises(ValueError, match="No transform functions were provided"):
66
+ obj.transform(ops)
67
+
68
+
69
+ @pytest.mark.parametrize("box", [dict, Series])
70
+ def test_transform_dictlike(axis, float_frame, box):
71
+ # GH 35964
72
+ if axis in (0, "index"):
73
+ e = float_frame.columns[0]
74
+ expected = float_frame[[e]].transform(np.abs)
75
+ else:
76
+ e = float_frame.index[0]
77
+ expected = float_frame.iloc[[0]].transform(np.abs)
78
+ result = float_frame.transform(box({e: np.abs}), axis=axis)
79
+ tm.assert_frame_equal(result, expected)
80
+
81
+
82
+ def test_transform_dictlike_mixed():
83
+ # GH 40018 - mix of lists and non-lists in values of a dictionary
84
+ df = DataFrame({"a": [1, 2], "b": [1, 4], "c": [1, 4]})
85
+ result = df.transform({"b": ["sqrt", "abs"], "c": "sqrt"})
86
+ expected = DataFrame(
87
+ [[1.0, 1, 1.0], [2.0, 4, 2.0]],
88
+ columns=MultiIndex([("b", "c"), ("sqrt", "abs")], [(0, 0, 1), (0, 1, 0)]),
89
+ )
90
+ tm.assert_frame_equal(result, expected)
91
+
92
+
93
+ @pytest.mark.parametrize(
94
+ "ops",
95
+ [
96
+ {},
97
+ {"A": []},
98
+ {"A": [], "B": "cumsum"},
99
+ {"A": "cumsum", "B": []},
100
+ {"A": [], "B": ["cumsum"]},
101
+ {"A": ["cumsum"], "B": []},
102
+ ],
103
+ )
104
+ def test_transform_empty_dictlike(float_frame, ops, frame_or_series):
105
+ obj = unpack_obj(float_frame, frame_or_series, 0)
106
+
107
+ with pytest.raises(ValueError, match="No transform functions were provided"):
108
+ obj.transform(ops)
109
+
110
+
111
+ @pytest.mark.parametrize("use_apply", [True, False])
112
+ def test_transform_udf(axis, float_frame, use_apply, frame_or_series):
113
+ # GH 35964
114
+ obj = unpack_obj(float_frame, frame_or_series, axis)
115
+
116
+ # transform uses UDF either via apply or passing the entire DataFrame
117
+ def func(x):
118
+ # transform is using apply iff x is not a DataFrame
119
+ if use_apply == isinstance(x, frame_or_series):
120
+ # Force transform to fallback
121
+ raise ValueError
122
+ return x + 1
123
+
124
+ result = obj.transform(func, axis=axis)
125
+ expected = obj + 1
126
+ tm.assert_equal(result, expected)
127
+
128
+
129
+ wont_fail = ["ffill", "bfill", "fillna", "pad", "backfill", "shift"]
130
+ frame_kernels_raise = [x for x in frame_transform_kernels if x not in wont_fail]
131
+
132
+
133
+ @pytest.mark.parametrize("op", [*frame_kernels_raise, lambda x: x + 1])
134
+ def test_transform_bad_dtype(op, frame_or_series, request):
135
+ # GH 35964
136
+ if op == "ngroup":
137
+ request.node.add_marker(
138
+ pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
139
+ )
140
+
141
+ obj = DataFrame({"A": 3 * [object]}) # DataFrame that will fail on most transforms
142
+ obj = tm.get_obj(obj, frame_or_series)
143
+ error = TypeError
144
+ msg = "|".join(
145
+ [
146
+ "not supported between instances of 'type' and 'type'",
147
+ "unsupported operand type",
148
+ ]
149
+ )
150
+
151
+ with pytest.raises(error, match=msg):
152
+ obj.transform(op)
153
+ with pytest.raises(error, match=msg):
154
+ obj.transform([op])
155
+ with pytest.raises(error, match=msg):
156
+ obj.transform({"A": op})
157
+ with pytest.raises(error, match=msg):
158
+ obj.transform({"A": [op]})
159
+
160
+
161
+ @pytest.mark.parametrize("op", frame_kernels_raise)
162
+ def test_transform_failure_typeerror(request, op):
163
+ # GH 35964
164
+
165
+ if op == "ngroup":
166
+ request.node.add_marker(
167
+ pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
168
+ )
169
+
170
+ # Using object makes most transform kernels fail
171
+ df = DataFrame({"A": 3 * [object], "B": [1, 2, 3]})
172
+ error = TypeError
173
+ msg = "|".join(
174
+ [
175
+ "not supported between instances of 'type' and 'type'",
176
+ "unsupported operand type",
177
+ ]
178
+ )
179
+
180
+ with pytest.raises(error, match=msg):
181
+ df.transform([op])
182
+
183
+ with pytest.raises(error, match=msg):
184
+ df.transform({"A": op, "B": op})
185
+
186
+ with pytest.raises(error, match=msg):
187
+ df.transform({"A": [op], "B": [op]})
188
+
189
+ with pytest.raises(error, match=msg):
190
+ df.transform({"A": [op, "shift"], "B": [op]})
191
+
192
+
193
+ def test_transform_failure_valueerror():
194
+ # GH 40211
195
+ def op(x):
196
+ if np.sum(np.sum(x)) < 10:
197
+ raise ValueError
198
+ return x
199
+
200
+ df = DataFrame({"A": [1, 2, 3], "B": [400, 500, 600]})
201
+ msg = "Transform function failed"
202
+
203
+ with pytest.raises(ValueError, match=msg):
204
+ df.transform([op])
205
+
206
+ with pytest.raises(ValueError, match=msg):
207
+ df.transform({"A": op, "B": op})
208
+
209
+ with pytest.raises(ValueError, match=msg):
210
+ df.transform({"A": [op], "B": [op]})
211
+
212
+ with pytest.raises(ValueError, match=msg):
213
+ df.transform({"A": [op, "shift"], "B": [op]})
214
+
215
+
216
+ @pytest.mark.parametrize("use_apply", [True, False])
217
+ def test_transform_passes_args(use_apply, frame_or_series):
218
+ # GH 35964
219
+ # transform uses UDF either via apply or passing the entire DataFrame
220
+ expected_args = [1, 2]
221
+ expected_kwargs = {"c": 3}
222
+
223
+ def f(x, a, b, c):
224
+ # transform is using apply iff x is not a DataFrame
225
+ if use_apply == isinstance(x, frame_or_series):
226
+ # Force transform to fallback
227
+ raise ValueError
228
+ assert [a, b] == expected_args
229
+ assert c == expected_kwargs["c"]
230
+ return x
231
+
232
+ frame_or_series([1]).transform(f, 0, *expected_args, **expected_kwargs)
233
+
234
+
235
+ def test_transform_empty_dataframe():
236
+ # https://github.com/pandas-dev/pandas/issues/39636
237
+ df = DataFrame([], columns=["col1", "col2"])
238
+ result = df.transform(lambda x: x + 10)
239
+ tm.assert_frame_equal(result, df)
240
+
241
+ result = df["col1"].transform(lambda x: x + 10)
242
+ tm.assert_series_equal(result, df["col1"])
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_invalid_arg.py ADDED
@@ -0,0 +1,365 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Tests specifically aimed at detecting bad arguments.
2
+ # This file is organized by reason for exception.
3
+ # 1. always invalid argument values
4
+ # 2. missing column(s)
5
+ # 3. incompatible ops/dtype/args/kwargs
6
+ # 4. invalid result shape/type
7
+ # If your test does not fit into one of these categories, add to this list.
8
+
9
+ from itertools import chain
10
+ import re
11
+
12
+ import numpy as np
13
+ import pytest
14
+
15
+ from pandas.errors import SpecificationError
16
+
17
+ from pandas import (
18
+ Categorical,
19
+ DataFrame,
20
+ Series,
21
+ date_range,
22
+ notna,
23
+ )
24
+ import pandas._testing as tm
25
+
26
+
27
+ @pytest.mark.parametrize("result_type", ["foo", 1])
28
+ def test_result_type_error(result_type, int_frame_const_col):
29
+ # allowed result_type
30
+ df = int_frame_const_col
31
+
32
+ msg = (
33
+ "invalid value for result_type, must be one of "
34
+ "{None, 'reduce', 'broadcast', 'expand'}"
35
+ )
36
+ with pytest.raises(ValueError, match=msg):
37
+ df.apply(lambda x: [1, 2, 3], axis=1, result_type=result_type)
38
+
39
+
40
+ def test_apply_invalid_axis_value():
41
+ df = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]], index=["a", "a", "c"])
42
+ msg = "No axis named 2 for object type DataFrame"
43
+ with pytest.raises(ValueError, match=msg):
44
+ df.apply(lambda x: x, 2)
45
+
46
+
47
+ def test_applymap_invalid_na_action(float_frame):
48
+ # GH 23803
49
+ with pytest.raises(ValueError, match="na_action must be .*Got 'abc'"):
50
+ float_frame.applymap(lambda x: len(str(x)), na_action="abc")
51
+
52
+
53
+ def test_agg_raises():
54
+ # GH 26513
55
+ df = DataFrame({"A": [0, 1], "B": [1, 2]})
56
+ msg = "Must provide"
57
+
58
+ with pytest.raises(TypeError, match=msg):
59
+ df.agg()
60
+
61
+
62
+ def test_map_with_invalid_na_action_raises():
63
+ # https://github.com/pandas-dev/pandas/issues/32815
64
+ s = Series([1, 2, 3])
65
+ msg = "na_action must either be 'ignore' or None"
66
+ with pytest.raises(ValueError, match=msg):
67
+ s.map(lambda x: x, na_action="____")
68
+
69
+
70
+ @pytest.mark.parametrize("input_na_action", ["____", True])
71
+ def test_map_arg_is_dict_with_invalid_na_action_raises(input_na_action):
72
+ # https://github.com/pandas-dev/pandas/issues/46588
73
+ s = Series([1, 2, 3])
74
+ msg = f"na_action must either be 'ignore' or None, {input_na_action} was passed"
75
+ with pytest.raises(ValueError, match=msg):
76
+ s.map({1: 2}, na_action=input_na_action)
77
+
78
+
79
+ def test_map_categorical_na_action():
80
+ values = Categorical(list("ABBABCD"), categories=list("DCBA"), ordered=True)
81
+ s = Series(values, name="XX", index=list("abcdefg"))
82
+ with pytest.raises(NotImplementedError, match=tm.EMPTY_STRING_PATTERN):
83
+ s.map(lambda x: x, na_action="ignore")
84
+
85
+
86
+ def test_map_datetimetz_na_action():
87
+ values = date_range("2011-01-01", "2011-01-02", freq="H").tz_localize("Asia/Tokyo")
88
+ s = Series(values, name="XX")
89
+ with pytest.raises(NotImplementedError, match=tm.EMPTY_STRING_PATTERN):
90
+ s.map(lambda x: x, na_action="ignore")
91
+
92
+
93
+ @pytest.mark.parametrize("method", ["apply", "agg", "transform"])
94
+ @pytest.mark.parametrize("func", [{"A": {"B": "sum"}}, {"A": {"B": ["sum"]}}])
95
+ def test_nested_renamer(frame_or_series, method, func):
96
+ # GH 35964
97
+ obj = frame_or_series({"A": [1]})
98
+ match = "nested renamer is not supported"
99
+ with pytest.raises(SpecificationError, match=match):
100
+ getattr(obj, method)(func)
101
+
102
+
103
+ @pytest.mark.parametrize(
104
+ "renamer",
105
+ [{"foo": ["min", "max"]}, {"foo": ["min", "max"], "bar": ["sum", "mean"]}],
106
+ )
107
+ def test_series_nested_renamer(renamer):
108
+ s = Series(range(6), dtype="int64", name="series")
109
+ msg = "nested renamer is not supported"
110
+ with pytest.raises(SpecificationError, match=msg):
111
+ s.agg(renamer)
112
+
113
+
114
+ def test_apply_dict_depr():
115
+ tsdf = DataFrame(
116
+ np.random.randn(10, 3),
117
+ columns=["A", "B", "C"],
118
+ index=date_range("1/1/2000", periods=10),
119
+ )
120
+ msg = "nested renamer is not supported"
121
+ with pytest.raises(SpecificationError, match=msg):
122
+ tsdf.A.agg({"foo": ["sum", "mean"]})
123
+
124
+
125
+ @pytest.mark.parametrize("method", ["agg", "transform"])
126
+ def test_dict_nested_renaming_depr(method):
127
+ df = DataFrame({"A": range(5), "B": 5})
128
+
129
+ # nested renaming
130
+ msg = r"nested renamer is not supported"
131
+ with pytest.raises(SpecificationError, match=msg):
132
+ getattr(df, method)({"A": {"foo": "min"}, "B": {"bar": "max"}})
133
+
134
+
135
+ @pytest.mark.parametrize("method", ["apply", "agg", "transform"])
136
+ @pytest.mark.parametrize("func", [{"B": "sum"}, {"B": ["sum"]}])
137
+ def test_missing_column(method, func):
138
+ # GH 40004
139
+ obj = DataFrame({"A": [1]})
140
+ match = re.escape("Column(s) ['B'] do not exist")
141
+ with pytest.raises(KeyError, match=match):
142
+ getattr(obj, method)(func)
143
+
144
+
145
+ def test_transform_mixed_column_name_dtypes():
146
+ # GH39025
147
+ df = DataFrame({"a": ["1"]})
148
+ msg = r"Column\(s\) \[1, 'b'\] do not exist"
149
+ with pytest.raises(KeyError, match=msg):
150
+ df.transform({"a": int, 1: str, "b": int})
151
+
152
+
153
+ @pytest.mark.parametrize(
154
+ "how, args", [("pct_change", ()), ("nsmallest", (1, ["a", "b"])), ("tail", 1)]
155
+ )
156
+ def test_apply_str_axis_1_raises(how, args):
157
+ # GH 39211 - some ops don't support axis=1
158
+ df = DataFrame({"a": [1, 2], "b": [3, 4]})
159
+ msg = f"Operation {how} does not support axis=1"
160
+ with pytest.raises(ValueError, match=msg):
161
+ df.apply(how, axis=1, args=args)
162
+
163
+
164
+ def test_transform_axis_1_raises():
165
+ # GH 35964
166
+ msg = "No axis named 1 for object type Series"
167
+ with pytest.raises(ValueError, match=msg):
168
+ Series([1]).transform("sum", axis=1)
169
+
170
+
171
+ def test_apply_modify_traceback():
172
+ data = DataFrame(
173
+ {
174
+ "A": [
175
+ "foo",
176
+ "foo",
177
+ "foo",
178
+ "foo",
179
+ "bar",
180
+ "bar",
181
+ "bar",
182
+ "bar",
183
+ "foo",
184
+ "foo",
185
+ "foo",
186
+ ],
187
+ "B": [
188
+ "one",
189
+ "one",
190
+ "one",
191
+ "two",
192
+ "one",
193
+ "one",
194
+ "one",
195
+ "two",
196
+ "two",
197
+ "two",
198
+ "one",
199
+ ],
200
+ "C": [
201
+ "dull",
202
+ "dull",
203
+ "shiny",
204
+ "dull",
205
+ "dull",
206
+ "shiny",
207
+ "shiny",
208
+ "dull",
209
+ "shiny",
210
+ "shiny",
211
+ "shiny",
212
+ ],
213
+ "D": np.random.randn(11),
214
+ "E": np.random.randn(11),
215
+ "F": np.random.randn(11),
216
+ }
217
+ )
218
+
219
+ data.loc[4, "C"] = np.nan
220
+
221
+ def transform(row):
222
+ if row["C"].startswith("shin") and row["A"] == "foo":
223
+ row["D"] = 7
224
+ return row
225
+
226
+ def transform2(row):
227
+ if notna(row["C"]) and row["C"].startswith("shin") and row["A"] == "foo":
228
+ row["D"] = 7
229
+ return row
230
+
231
+ msg = "'float' object has no attribute 'startswith'"
232
+ with pytest.raises(AttributeError, match=msg):
233
+ data.apply(transform, axis=1)
234
+
235
+
236
+ @pytest.mark.parametrize(
237
+ "df, func, expected",
238
+ tm.get_cython_table_params(
239
+ DataFrame([["a", "b"], ["b", "a"]]), [["cumprod", TypeError]]
240
+ ),
241
+ )
242
+ def test_agg_cython_table_raises_frame(df, func, expected, axis):
243
+ # GH 21224
244
+ msg = "can't multiply sequence by non-int of type 'str'"
245
+ with pytest.raises(expected, match=msg):
246
+ df.agg(func, axis=axis)
247
+
248
+
249
+ @pytest.mark.parametrize(
250
+ "series, func, expected",
251
+ chain(
252
+ tm.get_cython_table_params(
253
+ Series("a b c".split()),
254
+ [
255
+ ("mean", TypeError), # mean raises TypeError
256
+ ("prod", TypeError),
257
+ ("std", TypeError),
258
+ ("var", TypeError),
259
+ ("median", TypeError),
260
+ ("cumprod", TypeError),
261
+ ],
262
+ )
263
+ ),
264
+ )
265
+ def test_agg_cython_table_raises_series(series, func, expected):
266
+ # GH21224
267
+ msg = r"[Cc]ould not convert|can't multiply sequence by non-int of type"
268
+ with pytest.raises(expected, match=msg):
269
+ # e.g. Series('a b'.split()).cumprod() will raise
270
+ series.agg(func)
271
+
272
+
273
+ def test_agg_none_to_type():
274
+ # GH 40543
275
+ df = DataFrame({"a": [None]})
276
+ msg = re.escape("int() argument must be a string")
277
+ with pytest.raises(TypeError, match=msg):
278
+ df.agg({"a": lambda x: int(x.iloc[0])})
279
+
280
+
281
+ def test_transform_none_to_type():
282
+ # GH#34377
283
+ df = DataFrame({"a": [None]})
284
+ msg = "argument must be a"
285
+ with pytest.raises(TypeError, match=msg):
286
+ df.transform({"a": lambda x: int(x.iloc[0])})
287
+
288
+
289
+ @pytest.mark.parametrize(
290
+ "func",
291
+ [
292
+ lambda x: np.array([1, 2]).reshape(-1, 2),
293
+ lambda x: [1, 2],
294
+ lambda x: Series([1, 2]),
295
+ ],
296
+ )
297
+ def test_apply_broadcast_error(int_frame_const_col, func):
298
+ df = int_frame_const_col
299
+
300
+ # > 1 ndim
301
+ msg = "too many dims to broadcast|cannot broadcast result"
302
+ with pytest.raises(ValueError, match=msg):
303
+ df.apply(func, axis=1, result_type="broadcast")
304
+
305
+
306
+ def test_transform_and_agg_err_agg(axis, float_frame):
307
+ # cannot both transform and agg
308
+ msg = "cannot combine transform and aggregation operations"
309
+ with pytest.raises(ValueError, match=msg):
310
+ with np.errstate(all="ignore"):
311
+ float_frame.agg(["max", "sqrt"], axis=axis)
312
+
313
+
314
+ @pytest.mark.parametrize(
315
+ "func, msg",
316
+ [
317
+ (["sqrt", "max"], "cannot combine transform and aggregation"),
318
+ (
319
+ {"foo": np.sqrt, "bar": "sum"},
320
+ "cannot perform both aggregation and transformation",
321
+ ),
322
+ ],
323
+ )
324
+ def test_transform_and_agg_err_series(string_series, func, msg):
325
+ # we are trying to transform with an aggregator
326
+ with pytest.raises(ValueError, match=msg):
327
+ with np.errstate(all="ignore"):
328
+ string_series.agg(func)
329
+
330
+
331
+ @pytest.mark.parametrize("func", [["max", "min"], ["max", "sqrt"]])
332
+ def test_transform_wont_agg_frame(axis, float_frame, func):
333
+ # GH 35964
334
+ # cannot both transform and agg
335
+ msg = "Function did not transform"
336
+ with pytest.raises(ValueError, match=msg):
337
+ float_frame.transform(func, axis=axis)
338
+
339
+
340
+ @pytest.mark.parametrize("func", [["min", "max"], ["sqrt", "max"]])
341
+ def test_transform_wont_agg_series(string_series, func):
342
+ # GH 35964
343
+ # we are trying to transform with an aggregator
344
+ msg = "Function did not transform"
345
+
346
+ warn = RuntimeWarning if func[0] == "sqrt" else None
347
+ warn_msg = "invalid value encountered in sqrt"
348
+ with pytest.raises(ValueError, match=msg):
349
+ with tm.assert_produces_warning(warn, match=warn_msg, check_stacklevel=False):
350
+ string_series.transform(func)
351
+
352
+
353
+ @pytest.mark.parametrize(
354
+ "op_wrapper", [lambda x: x, lambda x: [x], lambda x: {"A": x}, lambda x: {"A": [x]}]
355
+ )
356
+ def test_transform_reducer_raises(all_reductions, frame_or_series, op_wrapper):
357
+ # GH 35964
358
+ op = op_wrapper(all_reductions)
359
+
360
+ obj = DataFrame({"A": [1, 2, 3]})
361
+ obj = tm.get_obj(obj, frame_or_series)
362
+
363
+ msg = "Function did not transform"
364
+ with pytest.raises(ValueError, match=msg):
365
+ obj.transform(op)
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_series_apply.py ADDED
@@ -0,0 +1,956 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import (
2
+ Counter,
3
+ defaultdict,
4
+ )
5
+ from decimal import Decimal
6
+ import math
7
+
8
+ import numpy as np
9
+ import pytest
10
+
11
+ import pandas as pd
12
+ from pandas import (
13
+ DataFrame,
14
+ Index,
15
+ MultiIndex,
16
+ Series,
17
+ concat,
18
+ isna,
19
+ timedelta_range,
20
+ )
21
+ import pandas._testing as tm
22
+ from pandas.tests.apply.common import series_transform_kernels
23
+
24
+
25
+ def test_series_map_box_timedelta():
26
+ # GH#11349
27
+ ser = Series(timedelta_range("1 day 1 s", periods=5, freq="h"))
28
+
29
+ def f(x):
30
+ return x.total_seconds()
31
+
32
+ ser.map(f)
33
+ ser.apply(f)
34
+ DataFrame(ser).applymap(f)
35
+
36
+
37
+ def test_apply(datetime_series):
38
+ with np.errstate(all="ignore"):
39
+ tm.assert_series_equal(datetime_series.apply(np.sqrt), np.sqrt(datetime_series))
40
+
41
+ # element-wise apply
42
+ tm.assert_series_equal(datetime_series.apply(math.exp), np.exp(datetime_series))
43
+
44
+ # empty series
45
+ s = Series(dtype=object, name="foo", index=Index([], name="bar"))
46
+ rs = s.apply(lambda x: x)
47
+ tm.assert_series_equal(s, rs)
48
+
49
+ # check all metadata (GH 9322)
50
+ assert s is not rs
51
+ assert s.index is rs.index
52
+ assert s.dtype == rs.dtype
53
+ assert s.name == rs.name
54
+
55
+ # index but no data
56
+ s = Series(index=[1, 2, 3], dtype=np.float64)
57
+ rs = s.apply(lambda x: x)
58
+ tm.assert_series_equal(s, rs)
59
+
60
+
61
+ def test_apply_same_length_inference_bug():
62
+ s = Series([1, 2])
63
+
64
+ def f(x):
65
+ return (x, x + 1)
66
+
67
+ result = s.apply(f)
68
+ expected = s.map(f)
69
+ tm.assert_series_equal(result, expected)
70
+
71
+ s = Series([1, 2, 3])
72
+ result = s.apply(f)
73
+ expected = s.map(f)
74
+ tm.assert_series_equal(result, expected)
75
+
76
+
77
+ def test_apply_dont_convert_dtype():
78
+ s = Series(np.random.randn(10))
79
+
80
+ def f(x):
81
+ return x if x > 0 else np.nan
82
+
83
+ result = s.apply(f, convert_dtype=False)
84
+ assert result.dtype == object
85
+
86
+
87
+ def test_apply_args():
88
+ s = Series(["foo,bar"])
89
+
90
+ result = s.apply(str.split, args=(",",))
91
+ assert result[0] == ["foo", "bar"]
92
+ assert isinstance(result[0], list)
93
+
94
+
95
+ @pytest.mark.parametrize(
96
+ "args, kwargs, increment",
97
+ [((), {}, 0), ((), {"a": 1}, 1), ((2, 3), {}, 32), ((1,), {"c": 2}, 201)],
98
+ )
99
+ def test_agg_args(args, kwargs, increment):
100
+ # GH 43357
101
+ def f(x, a=0, b=0, c=0):
102
+ return x + a + 10 * b + 100 * c
103
+
104
+ s = Series([1, 2])
105
+ result = s.agg(f, 0, *args, **kwargs)
106
+ expected = s + increment
107
+ tm.assert_series_equal(result, expected)
108
+
109
+
110
+ def test_agg_list_like_func_with_args():
111
+ # GH 50624
112
+
113
+ s = Series([1, 2, 3])
114
+
115
+ def foo1(x, a=1, c=0):
116
+ return x + a + c
117
+
118
+ def foo2(x, b=2, c=0):
119
+ return x + b + c
120
+
121
+ msg = r"foo1\(\) got an unexpected keyword argument 'b'"
122
+ with pytest.raises(TypeError, match=msg):
123
+ s.agg([foo1, foo2], 0, 3, b=3, c=4)
124
+
125
+ result = s.agg([foo1, foo2], 0, 3, c=4)
126
+ expected = DataFrame({"foo1": [8, 9, 10], "foo2": [8, 9, 10]})
127
+ tm.assert_frame_equal(result, expected)
128
+
129
+
130
+ def test_series_map_box_timestamps():
131
+ # GH#2689, GH#2627
132
+ ser = Series(pd.date_range("1/1/2000", periods=10))
133
+
134
+ def func(x):
135
+ return (x.hour, x.day, x.month)
136
+
137
+ # it works!
138
+ ser.map(func)
139
+ ser.apply(func)
140
+
141
+
142
+ def test_series_map_stringdtype(any_string_dtype):
143
+ # map test on StringDType, GH#40823
144
+ ser1 = Series(
145
+ data=["cat", "dog", "rabbit"],
146
+ index=["id1", "id2", "id3"],
147
+ dtype=any_string_dtype,
148
+ )
149
+ ser2 = Series(data=["id3", "id2", "id1", "id7000"], dtype=any_string_dtype)
150
+ result = ser2.map(ser1)
151
+ expected = Series(data=["rabbit", "dog", "cat", pd.NA], dtype=any_string_dtype)
152
+
153
+ tm.assert_series_equal(result, expected)
154
+
155
+
156
+ def test_apply_box():
157
+ # ufunc will not be boxed. Same test cases as the test_map_box
158
+ vals = [pd.Timestamp("2011-01-01"), pd.Timestamp("2011-01-02")]
159
+ s = Series(vals)
160
+ assert s.dtype == "datetime64[ns]"
161
+ # boxed value must be Timestamp instance
162
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.day}_{x.tz}")
163
+ exp = Series(["Timestamp_1_None", "Timestamp_2_None"])
164
+ tm.assert_series_equal(res, exp)
165
+
166
+ vals = [
167
+ pd.Timestamp("2011-01-01", tz="US/Eastern"),
168
+ pd.Timestamp("2011-01-02", tz="US/Eastern"),
169
+ ]
170
+ s = Series(vals)
171
+ assert s.dtype == "datetime64[ns, US/Eastern]"
172
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.day}_{x.tz}")
173
+ exp = Series(["Timestamp_1_US/Eastern", "Timestamp_2_US/Eastern"])
174
+ tm.assert_series_equal(res, exp)
175
+
176
+ # timedelta
177
+ vals = [pd.Timedelta("1 days"), pd.Timedelta("2 days")]
178
+ s = Series(vals)
179
+ assert s.dtype == "timedelta64[ns]"
180
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.days}")
181
+ exp = Series(["Timedelta_1", "Timedelta_2"])
182
+ tm.assert_series_equal(res, exp)
183
+
184
+ # period
185
+ vals = [pd.Period("2011-01-01", freq="M"), pd.Period("2011-01-02", freq="M")]
186
+ s = Series(vals)
187
+ assert s.dtype == "Period[M]"
188
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.freqstr}")
189
+ exp = Series(["Period_M", "Period_M"])
190
+ tm.assert_series_equal(res, exp)
191
+
192
+
193
+ def test_apply_datetimetz():
194
+ values = pd.date_range("2011-01-01", "2011-01-02", freq="H").tz_localize(
195
+ "Asia/Tokyo"
196
+ )
197
+ s = Series(values, name="XX")
198
+
199
+ result = s.apply(lambda x: x + pd.offsets.Day())
200
+ exp_values = pd.date_range("2011-01-02", "2011-01-03", freq="H").tz_localize(
201
+ "Asia/Tokyo"
202
+ )
203
+ exp = Series(exp_values, name="XX")
204
+ tm.assert_series_equal(result, exp)
205
+
206
+ result = s.apply(lambda x: x.hour)
207
+ exp = Series(list(range(24)) + [0], name="XX", dtype=np.int32)
208
+ tm.assert_series_equal(result, exp)
209
+
210
+ # not vectorized
211
+ def f(x):
212
+ if not isinstance(x, pd.Timestamp):
213
+ raise ValueError
214
+ return str(x.tz)
215
+
216
+ result = s.map(f)
217
+ exp = Series(["Asia/Tokyo"] * 25, name="XX")
218
+ tm.assert_series_equal(result, exp)
219
+
220
+
221
+ def test_apply_categorical():
222
+ values = pd.Categorical(list("ABBABCD"), categories=list("DCBA"), ordered=True)
223
+ ser = Series(values, name="XX", index=list("abcdefg"))
224
+ result = ser.apply(lambda x: x.lower())
225
+
226
+ # should be categorical dtype when the number of categories are
227
+ # the same
228
+ values = pd.Categorical(list("abbabcd"), categories=list("dcba"), ordered=True)
229
+ exp = Series(values, name="XX", index=list("abcdefg"))
230
+ tm.assert_series_equal(result, exp)
231
+ tm.assert_categorical_equal(result.values, exp.values)
232
+
233
+ result = ser.apply(lambda x: "A")
234
+ exp = Series(["A"] * 7, name="XX", index=list("abcdefg"))
235
+ tm.assert_series_equal(result, exp)
236
+ assert result.dtype == object
237
+
238
+
239
+ @pytest.mark.parametrize("series", [["1-1", "1-1", np.NaN], ["1-1", "1-2", np.NaN]])
240
+ def test_apply_categorical_with_nan_values(series):
241
+ # GH 20714 bug fixed in: GH 24275
242
+ s = Series(series, dtype="category")
243
+ result = s.apply(lambda x: x.split("-")[0])
244
+ result = result.astype(object)
245
+ expected = Series(["1", "1", np.NaN], dtype="category")
246
+ expected = expected.astype(object)
247
+ tm.assert_series_equal(result, expected)
248
+
249
+
250
+ def test_apply_empty_integer_series_with_datetime_index():
251
+ # GH 21245
252
+ s = Series([], index=pd.date_range(start="2018-01-01", periods=0), dtype=int)
253
+ result = s.apply(lambda x: x)
254
+ tm.assert_series_equal(result, s)
255
+
256
+
257
+ def test_transform(string_series):
258
+ # transforming functions
259
+
260
+ with np.errstate(all="ignore"):
261
+ f_sqrt = np.sqrt(string_series)
262
+ f_abs = np.abs(string_series)
263
+
264
+ # ufunc
265
+ result = string_series.apply(np.sqrt)
266
+ expected = f_sqrt.copy()
267
+ tm.assert_series_equal(result, expected)
268
+
269
+ # list-like
270
+ result = string_series.apply([np.sqrt])
271
+ expected = f_sqrt.to_frame().copy()
272
+ expected.columns = ["sqrt"]
273
+ tm.assert_frame_equal(result, expected)
274
+
275
+ result = string_series.apply(["sqrt"])
276
+ tm.assert_frame_equal(result, expected)
277
+
278
+ # multiple items in list
279
+ # these are in the order as if we are applying both functions per
280
+ # series and then concatting
281
+ expected = concat([f_sqrt, f_abs], axis=1)
282
+ expected.columns = ["sqrt", "absolute"]
283
+ result = string_series.apply([np.sqrt, np.abs])
284
+ tm.assert_frame_equal(result, expected)
285
+
286
+ # dict, provide renaming
287
+ expected = concat([f_sqrt, f_abs], axis=1)
288
+ expected.columns = ["foo", "bar"]
289
+ expected = expected.unstack().rename("series")
290
+
291
+ result = string_series.apply({"foo": np.sqrt, "bar": np.abs})
292
+ tm.assert_series_equal(result.reindex_like(expected), expected)
293
+
294
+
295
+ @pytest.mark.parametrize("op", series_transform_kernels)
296
+ def test_transform_partial_failure(op, request):
297
+ # GH 35964
298
+ if op in ("ffill", "bfill", "pad", "backfill", "shift"):
299
+ request.node.add_marker(
300
+ pytest.mark.xfail(reason=f"{op} is successful on any dtype")
301
+ )
302
+
303
+ # Using object makes most transform kernels fail
304
+ ser = Series(3 * [object])
305
+
306
+ if op in ("fillna", "ngroup"):
307
+ error = ValueError
308
+ msg = "Transform function failed"
309
+ else:
310
+ error = TypeError
311
+ msg = "|".join(
312
+ [
313
+ "not supported between instances of 'type' and 'type'",
314
+ "unsupported operand type",
315
+ ]
316
+ )
317
+
318
+ with pytest.raises(error, match=msg):
319
+ ser.transform([op, "shift"])
320
+
321
+ with pytest.raises(error, match=msg):
322
+ ser.transform({"A": op, "B": "shift"})
323
+
324
+ with pytest.raises(error, match=msg):
325
+ ser.transform({"A": [op], "B": ["shift"]})
326
+
327
+ with pytest.raises(error, match=msg):
328
+ ser.transform({"A": [op, "shift"], "B": [op]})
329
+
330
+
331
+ def test_transform_partial_failure_valueerror():
332
+ # GH 40211
333
+ def noop(x):
334
+ return x
335
+
336
+ def raising_op(_):
337
+ raise ValueError
338
+
339
+ ser = Series(3 * [object])
340
+ msg = "Transform function failed"
341
+
342
+ with pytest.raises(ValueError, match=msg):
343
+ ser.transform([noop, raising_op])
344
+
345
+ with pytest.raises(ValueError, match=msg):
346
+ ser.transform({"A": raising_op, "B": noop})
347
+
348
+ with pytest.raises(ValueError, match=msg):
349
+ ser.transform({"A": [raising_op], "B": [noop]})
350
+
351
+ with pytest.raises(ValueError, match=msg):
352
+ ser.transform({"A": [noop, raising_op], "B": [noop]})
353
+
354
+
355
+ def test_demo():
356
+ # demonstration tests
357
+ s = Series(range(6), dtype="int64", name="series")
358
+
359
+ result = s.agg(["min", "max"])
360
+ expected = Series([0, 5], index=["min", "max"], name="series")
361
+ tm.assert_series_equal(result, expected)
362
+
363
+ result = s.agg({"foo": "min"})
364
+ expected = Series([0], index=["foo"], name="series")
365
+ tm.assert_series_equal(result, expected)
366
+
367
+
368
+ def test_agg_apply_evaluate_lambdas_the_same(string_series):
369
+ # test that we are evaluating row-by-row first
370
+ # before vectorized evaluation
371
+ result = string_series.apply(lambda x: str(x))
372
+ expected = string_series.agg(lambda x: str(x))
373
+ tm.assert_series_equal(result, expected)
374
+
375
+ result = string_series.apply(str)
376
+ expected = string_series.agg(str)
377
+ tm.assert_series_equal(result, expected)
378
+
379
+
380
+ def test_with_nested_series(datetime_series):
381
+ # GH 2316
382
+ # .agg with a reducer and a transform, what to do
383
+ result = datetime_series.apply(lambda x: Series([x, x**2], index=["x", "x^2"]))
384
+ expected = DataFrame({"x": datetime_series, "x^2": datetime_series**2})
385
+ tm.assert_frame_equal(result, expected)
386
+
387
+ result = datetime_series.agg(lambda x: Series([x, x**2], index=["x", "x^2"]))
388
+ tm.assert_frame_equal(result, expected)
389
+
390
+
391
+ def test_replicate_describe(string_series):
392
+ # this also tests a result set that is all scalars
393
+ expected = string_series.describe()
394
+ result = string_series.apply(
395
+ {
396
+ "count": "count",
397
+ "mean": "mean",
398
+ "std": "std",
399
+ "min": "min",
400
+ "25%": lambda x: x.quantile(0.25),
401
+ "50%": "median",
402
+ "75%": lambda x: x.quantile(0.75),
403
+ "max": "max",
404
+ }
405
+ )
406
+ tm.assert_series_equal(result, expected)
407
+
408
+
409
+ def test_reduce(string_series):
410
+ # reductions with named functions
411
+ result = string_series.agg(["sum", "mean"])
412
+ expected = Series(
413
+ [string_series.sum(), string_series.mean()],
414
+ ["sum", "mean"],
415
+ name=string_series.name,
416
+ )
417
+ tm.assert_series_equal(result, expected)
418
+
419
+
420
+ @pytest.mark.parametrize("how", ["agg", "apply"])
421
+ def test_non_callable_aggregates(how):
422
+ # test agg using non-callable series attributes
423
+ # GH 39116 - expand to apply
424
+ s = Series([1, 2, None])
425
+
426
+ # Calling agg w/ just a string arg same as calling s.arg
427
+ result = getattr(s, how)("size")
428
+ expected = s.size
429
+ assert result == expected
430
+
431
+ # test when mixed w/ callable reducers
432
+ result = getattr(s, how)(["size", "count", "mean"])
433
+ expected = Series({"size": 3.0, "count": 2.0, "mean": 1.5})
434
+ tm.assert_series_equal(result, expected)
435
+
436
+
437
+ def test_series_apply_no_suffix_index():
438
+ # GH36189
439
+ s = Series([4] * 3)
440
+ result = s.apply(["sum", lambda x: x.sum(), lambda x: x.sum()])
441
+ expected = Series([12, 12, 12], index=["sum", "<lambda>", "<lambda>"])
442
+
443
+ tm.assert_series_equal(result, expected)
444
+
445
+
446
+ def test_map(datetime_series):
447
+ index, data = tm.getMixedTypeDict()
448
+
449
+ source = Series(data["B"], index=data["C"])
450
+ target = Series(data["C"][:4], index=data["D"][:4])
451
+
452
+ merged = target.map(source)
453
+
454
+ for k, v in merged.items():
455
+ assert v == source[target[k]]
456
+
457
+ # input could be a dict
458
+ merged = target.map(source.to_dict())
459
+
460
+ for k, v in merged.items():
461
+ assert v == source[target[k]]
462
+
463
+ # function
464
+ result = datetime_series.map(lambda x: x * 2)
465
+ tm.assert_series_equal(result, datetime_series * 2)
466
+
467
+ # GH 10324
468
+ a = Series([1, 2, 3, 4])
469
+ b = Series(["even", "odd", "even", "odd"], dtype="category")
470
+ c = Series(["even", "odd", "even", "odd"])
471
+
472
+ exp = Series(["odd", "even", "odd", np.nan], dtype="category")
473
+ tm.assert_series_equal(a.map(b), exp)
474
+ exp = Series(["odd", "even", "odd", np.nan])
475
+ tm.assert_series_equal(a.map(c), exp)
476
+
477
+ a = Series(["a", "b", "c", "d"])
478
+ b = Series([1, 2, 3, 4], index=pd.CategoricalIndex(["b", "c", "d", "e"]))
479
+ c = Series([1, 2, 3, 4], index=Index(["b", "c", "d", "e"]))
480
+
481
+ exp = Series([np.nan, 1, 2, 3])
482
+ tm.assert_series_equal(a.map(b), exp)
483
+ exp = Series([np.nan, 1, 2, 3])
484
+ tm.assert_series_equal(a.map(c), exp)
485
+
486
+ a = Series(["a", "b", "c", "d"])
487
+ b = Series(
488
+ ["B", "C", "D", "E"],
489
+ dtype="category",
490
+ index=pd.CategoricalIndex(["b", "c", "d", "e"]),
491
+ )
492
+ c = Series(["B", "C", "D", "E"], index=Index(["b", "c", "d", "e"]))
493
+
494
+ exp = Series(
495
+ pd.Categorical([np.nan, "B", "C", "D"], categories=["B", "C", "D", "E"])
496
+ )
497
+ tm.assert_series_equal(a.map(b), exp)
498
+ exp = Series([np.nan, "B", "C", "D"])
499
+ tm.assert_series_equal(a.map(c), exp)
500
+
501
+
502
+ def test_map_empty(request, index):
503
+ if isinstance(index, MultiIndex):
504
+ request.node.add_marker(
505
+ pytest.mark.xfail(
506
+ reason="Initializing a Series from a MultiIndex is not supported"
507
+ )
508
+ )
509
+
510
+ s = Series(index)
511
+ result = s.map({})
512
+
513
+ expected = Series(np.nan, index=s.index)
514
+ tm.assert_series_equal(result, expected)
515
+
516
+
517
+ def test_map_compat():
518
+ # related GH 8024
519
+ s = Series([True, True, False], index=[1, 2, 3])
520
+ result = s.map({True: "foo", False: "bar"})
521
+ expected = Series(["foo", "foo", "bar"], index=[1, 2, 3])
522
+ tm.assert_series_equal(result, expected)
523
+
524
+
525
+ def test_map_int():
526
+ left = Series({"a": 1.0, "b": 2.0, "c": 3.0, "d": 4})
527
+ right = Series({1: 11, 2: 22, 3: 33})
528
+
529
+ assert left.dtype == np.float_
530
+ assert issubclass(right.dtype.type, np.integer)
531
+
532
+ merged = left.map(right)
533
+ assert merged.dtype == np.float_
534
+ assert isna(merged["d"])
535
+ assert not isna(merged["c"])
536
+
537
+
538
+ def test_map_type_inference():
539
+ s = Series(range(3))
540
+ s2 = s.map(lambda x: np.where(x == 0, 0, 1))
541
+ assert issubclass(s2.dtype.type, np.integer)
542
+
543
+
544
+ def test_map_decimal(string_series):
545
+ result = string_series.map(lambda x: Decimal(str(x)))
546
+ assert result.dtype == np.object_
547
+ assert isinstance(result[0], Decimal)
548
+
549
+
550
+ def test_map_na_exclusion():
551
+ s = Series([1.5, np.nan, 3, np.nan, 5])
552
+
553
+ result = s.map(lambda x: x * 2, na_action="ignore")
554
+ exp = s * 2
555
+ tm.assert_series_equal(result, exp)
556
+
557
+
558
+ def test_map_dict_with_tuple_keys():
559
+ """
560
+ Due to new MultiIndex-ing behaviour in v0.14.0,
561
+ dicts with tuple keys passed to map were being
562
+ converted to a multi-index, preventing tuple values
563
+ from being mapped properly.
564
+ """
565
+ # GH 18496
566
+ df = DataFrame({"a": [(1,), (2,), (3, 4), (5, 6)]})
567
+ label_mappings = {(1,): "A", (2,): "B", (3, 4): "A", (5, 6): "B"}
568
+
569
+ df["labels"] = df["a"].map(label_mappings)
570
+ df["expected_labels"] = Series(["A", "B", "A", "B"], index=df.index)
571
+ # All labels should be filled now
572
+ tm.assert_series_equal(df["labels"], df["expected_labels"], check_names=False)
573
+
574
+
575
+ def test_map_counter():
576
+ s = Series(["a", "b", "c"], index=[1, 2, 3])
577
+ counter = Counter()
578
+ counter["b"] = 5
579
+ counter["c"] += 1
580
+ result = s.map(counter)
581
+ expected = Series([0, 5, 1], index=[1, 2, 3])
582
+ tm.assert_series_equal(result, expected)
583
+
584
+
585
+ def test_map_defaultdict():
586
+ s = Series([1, 2, 3], index=["a", "b", "c"])
587
+ default_dict = defaultdict(lambda: "blank")
588
+ default_dict[1] = "stuff"
589
+ result = s.map(default_dict)
590
+ expected = Series(["stuff", "blank", "blank"], index=["a", "b", "c"])
591
+ tm.assert_series_equal(result, expected)
592
+
593
+
594
+ def test_map_dict_na_key():
595
+ # https://github.com/pandas-dev/pandas/issues/17648
596
+ # Checks that np.nan key is appropriately mapped
597
+ s = Series([1, 2, np.nan])
598
+ expected = Series(["a", "b", "c"])
599
+ result = s.map({1: "a", 2: "b", np.nan: "c"})
600
+ tm.assert_series_equal(result, expected)
601
+
602
+
603
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
604
+ def test_map_defaultdict_na_key(na_action):
605
+ # GH 48813
606
+ s = Series([1, 2, np.nan])
607
+ default_map = defaultdict(lambda: "missing", {1: "a", 2: "b", np.nan: "c"})
608
+ result = s.map(default_map, na_action=na_action)
609
+ expected = Series({0: "a", 1: "b", 2: "c" if na_action is None else np.nan})
610
+ tm.assert_series_equal(result, expected)
611
+
612
+
613
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
614
+ def test_map_defaultdict_missing_key(na_action):
615
+ # GH 48813
616
+ s = Series([1, 2, np.nan])
617
+ default_map = defaultdict(lambda: "missing", {1: "a", 2: "b", 3: "c"})
618
+ result = s.map(default_map, na_action=na_action)
619
+ expected = Series({0: "a", 1: "b", 2: "missing" if na_action is None else np.nan})
620
+ tm.assert_series_equal(result, expected)
621
+
622
+
623
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
624
+ def test_map_defaultdict_unmutated(na_action):
625
+ # GH 48813
626
+ s = Series([1, 2, np.nan])
627
+ default_map = defaultdict(lambda: "missing", {1: "a", 2: "b", np.nan: "c"})
628
+ expected_default_map = default_map.copy()
629
+ s.map(default_map, na_action=na_action)
630
+ assert default_map == expected_default_map
631
+
632
+
633
+ @pytest.mark.parametrize("arg_func", [dict, Series])
634
+ def test_map_dict_ignore_na(arg_func):
635
+ # GH#47527
636
+ mapping = arg_func({1: 10, np.nan: 42})
637
+ ser = Series([1, np.nan, 2])
638
+ result = ser.map(mapping, na_action="ignore")
639
+ expected = Series([10, np.nan, np.nan])
640
+ tm.assert_series_equal(result, expected)
641
+
642
+
643
+ def test_map_defaultdict_ignore_na():
644
+ # GH#47527
645
+ mapping = defaultdict(int, {1: 10, np.nan: 42})
646
+ ser = Series([1, np.nan, 2])
647
+ result = ser.map(mapping)
648
+ expected = Series([10, 42, 0])
649
+ tm.assert_series_equal(result, expected)
650
+
651
+
652
+ def test_map_categorical_na_ignore():
653
+ # GH#47527
654
+ values = pd.Categorical([1, np.nan, 2], categories=[10, 1])
655
+ ser = Series(values)
656
+ result = ser.map({1: 10, np.nan: 42})
657
+ expected = Series([10, np.nan, np.nan])
658
+ tm.assert_series_equal(result, expected)
659
+
660
+
661
+ def test_map_dict_subclass_with_missing():
662
+ """
663
+ Test Series.map with a dictionary subclass that defines __missing__,
664
+ i.e. sets a default value (GH #15999).
665
+ """
666
+
667
+ class DictWithMissing(dict):
668
+ def __missing__(self, key):
669
+ return "missing"
670
+
671
+ s = Series([1, 2, 3])
672
+ dictionary = DictWithMissing({3: "three"})
673
+ result = s.map(dictionary)
674
+ expected = Series(["missing", "missing", "three"])
675
+ tm.assert_series_equal(result, expected)
676
+
677
+
678
+ def test_map_dict_subclass_without_missing():
679
+ class DictWithoutMissing(dict):
680
+ pass
681
+
682
+ s = Series([1, 2, 3])
683
+ dictionary = DictWithoutMissing({3: "three"})
684
+ result = s.map(dictionary)
685
+ expected = Series([np.nan, np.nan, "three"])
686
+ tm.assert_series_equal(result, expected)
687
+
688
+
689
+ def test_map_abc_mapping(non_dict_mapping_subclass):
690
+ # https://github.com/pandas-dev/pandas/issues/29733
691
+ # Check collections.abc.Mapping support as mapper for Series.map
692
+ s = Series([1, 2, 3])
693
+ not_a_dictionary = non_dict_mapping_subclass({3: "three"})
694
+ result = s.map(not_a_dictionary)
695
+ expected = Series([np.nan, np.nan, "three"])
696
+ tm.assert_series_equal(result, expected)
697
+
698
+
699
+ def test_map_abc_mapping_with_missing(non_dict_mapping_subclass):
700
+ # https://github.com/pandas-dev/pandas/issues/29733
701
+ # Check collections.abc.Mapping support as mapper for Series.map
702
+ class NonDictMappingWithMissing(non_dict_mapping_subclass):
703
+ def __missing__(self, key):
704
+ return "missing"
705
+
706
+ s = Series([1, 2, 3])
707
+ not_a_dictionary = NonDictMappingWithMissing({3: "three"})
708
+ result = s.map(not_a_dictionary)
709
+ # __missing__ is a dict concept, not a Mapping concept,
710
+ # so it should not change the result!
711
+ expected = Series([np.nan, np.nan, "three"])
712
+ tm.assert_series_equal(result, expected)
713
+
714
+
715
+ def test_map_box():
716
+ vals = [pd.Timestamp("2011-01-01"), pd.Timestamp("2011-01-02")]
717
+ s = Series(vals)
718
+ assert s.dtype == "datetime64[ns]"
719
+ # boxed value must be Timestamp instance
720
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.day}_{x.tz}")
721
+ exp = Series(["Timestamp_1_None", "Timestamp_2_None"])
722
+ tm.assert_series_equal(res, exp)
723
+
724
+ vals = [
725
+ pd.Timestamp("2011-01-01", tz="US/Eastern"),
726
+ pd.Timestamp("2011-01-02", tz="US/Eastern"),
727
+ ]
728
+ s = Series(vals)
729
+ assert s.dtype == "datetime64[ns, US/Eastern]"
730
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.day}_{x.tz}")
731
+ exp = Series(["Timestamp_1_US/Eastern", "Timestamp_2_US/Eastern"])
732
+ tm.assert_series_equal(res, exp)
733
+
734
+ # timedelta
735
+ vals = [pd.Timedelta("1 days"), pd.Timedelta("2 days")]
736
+ s = Series(vals)
737
+ assert s.dtype == "timedelta64[ns]"
738
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.days}")
739
+ exp = Series(["Timedelta_1", "Timedelta_2"])
740
+ tm.assert_series_equal(res, exp)
741
+
742
+ # period
743
+ vals = [pd.Period("2011-01-01", freq="M"), pd.Period("2011-01-02", freq="M")]
744
+ s = Series(vals)
745
+ assert s.dtype == "Period[M]"
746
+ res = s.apply(lambda x: f"{type(x).__name__}_{x.freqstr}")
747
+ exp = Series(["Period_M", "Period_M"])
748
+ tm.assert_series_equal(res, exp)
749
+
750
+
751
+ def test_map_categorical():
752
+ values = pd.Categorical(list("ABBABCD"), categories=list("DCBA"), ordered=True)
753
+ s = Series(values, name="XX", index=list("abcdefg"))
754
+
755
+ result = s.map(lambda x: x.lower())
756
+ exp_values = pd.Categorical(list("abbabcd"), categories=list("dcba"), ordered=True)
757
+ exp = Series(exp_values, name="XX", index=list("abcdefg"))
758
+ tm.assert_series_equal(result, exp)
759
+ tm.assert_categorical_equal(result.values, exp_values)
760
+
761
+ result = s.map(lambda x: "A")
762
+ exp = Series(["A"] * 7, name="XX", index=list("abcdefg"))
763
+ tm.assert_series_equal(result, exp)
764
+ assert result.dtype == object
765
+
766
+
767
+ def test_map_datetimetz():
768
+ values = pd.date_range("2011-01-01", "2011-01-02", freq="H").tz_localize(
769
+ "Asia/Tokyo"
770
+ )
771
+ s = Series(values, name="XX")
772
+
773
+ # keep tz
774
+ result = s.map(lambda x: x + pd.offsets.Day())
775
+ exp_values = pd.date_range("2011-01-02", "2011-01-03", freq="H").tz_localize(
776
+ "Asia/Tokyo"
777
+ )
778
+ exp = Series(exp_values, name="XX")
779
+ tm.assert_series_equal(result, exp)
780
+
781
+ result = s.map(lambda x: x.hour)
782
+ exp = Series(list(range(24)) + [0], name="XX", dtype=np.int32)
783
+ tm.assert_series_equal(result, exp)
784
+
785
+ # not vectorized
786
+ def f(x):
787
+ if not isinstance(x, pd.Timestamp):
788
+ raise ValueError
789
+ return str(x.tz)
790
+
791
+ result = s.map(f)
792
+ exp = Series(["Asia/Tokyo"] * 25, name="XX")
793
+ tm.assert_series_equal(result, exp)
794
+
795
+
796
+ @pytest.mark.parametrize(
797
+ "vals,mapping,exp",
798
+ [
799
+ (list("abc"), {np.nan: "not NaN"}, [np.nan] * 3 + ["not NaN"]),
800
+ (list("abc"), {"a": "a letter"}, ["a letter"] + [np.nan] * 3),
801
+ (list(range(3)), {0: 42}, [42] + [np.nan] * 3),
802
+ ],
803
+ )
804
+ def test_map_missing_mixed(vals, mapping, exp):
805
+ # GH20495
806
+ s = Series(vals + [np.nan])
807
+ result = s.map(mapping)
808
+
809
+ tm.assert_series_equal(result, Series(exp))
810
+
811
+
812
+ @pytest.mark.parametrize(
813
+ "dti,exp",
814
+ [
815
+ (
816
+ Series([1, 2], index=pd.DatetimeIndex([0, 31536000000])),
817
+ DataFrame(np.repeat([[1, 2]], 2, axis=0), dtype="int64"),
818
+ ),
819
+ (
820
+ tm.makeTimeSeries(nper=30),
821
+ DataFrame(np.repeat([[1, 2]], 30, axis=0), dtype="int64"),
822
+ ),
823
+ ],
824
+ )
825
+ @pytest.mark.parametrize("aware", [True, False])
826
+ def test_apply_series_on_date_time_index_aware_series(dti, exp, aware):
827
+ # GH 25959
828
+ # Calling apply on a localized time series should not cause an error
829
+ if aware:
830
+ index = dti.tz_localize("UTC").index
831
+ else:
832
+ index = dti.index
833
+ result = Series(index).apply(lambda x: Series([1, 2]))
834
+ tm.assert_frame_equal(result, exp)
835
+
836
+
837
+ def test_apply_scalar_on_date_time_index_aware_series():
838
+ # GH 25959
839
+ # Calling apply on a localized time series should not cause an error
840
+ series = tm.makeTimeSeries(nper=30).tz_localize("UTC")
841
+ result = Series(series.index).apply(lambda x: 1)
842
+ tm.assert_series_equal(result, Series(np.ones(30), dtype="int64"))
843
+
844
+
845
+ def test_map_float_to_string_precision():
846
+ # GH 13228
847
+ ser = Series(1 / 3)
848
+ result = ser.map(lambda val: str(val)).to_dict()
849
+ expected = {0: "0.3333333333333333"}
850
+ assert result == expected
851
+
852
+
853
+ def test_apply_to_timedelta():
854
+ list_of_valid_strings = ["00:00:01", "00:00:02"]
855
+ a = pd.to_timedelta(list_of_valid_strings)
856
+ b = Series(list_of_valid_strings).apply(pd.to_timedelta)
857
+ tm.assert_series_equal(Series(a), b)
858
+
859
+ list_of_strings = ["00:00:01", np.nan, pd.NaT, pd.NaT]
860
+
861
+ a = pd.to_timedelta(list_of_strings)
862
+ ser = Series(list_of_strings)
863
+ b = ser.apply(pd.to_timedelta)
864
+ tm.assert_series_equal(Series(a), b)
865
+
866
+
867
+ @pytest.mark.parametrize(
868
+ "ops, names",
869
+ [
870
+ ([np.sum], ["sum"]),
871
+ ([np.sum, np.mean], ["sum", "mean"]),
872
+ (np.array([np.sum]), ["sum"]),
873
+ (np.array([np.sum, np.mean]), ["sum", "mean"]),
874
+ ],
875
+ )
876
+ @pytest.mark.parametrize("how", ["agg", "apply"])
877
+ def test_apply_listlike_reducer(string_series, ops, names, how):
878
+ # GH 39140
879
+ expected = Series({name: op(string_series) for name, op in zip(names, ops)})
880
+ expected.name = "series"
881
+ result = getattr(string_series, how)(ops)
882
+ tm.assert_series_equal(result, expected)
883
+
884
+
885
+ @pytest.mark.parametrize(
886
+ "ops",
887
+ [
888
+ {"A": np.sum},
889
+ {"A": np.sum, "B": np.mean},
890
+ Series({"A": np.sum}),
891
+ Series({"A": np.sum, "B": np.mean}),
892
+ ],
893
+ )
894
+ @pytest.mark.parametrize("how", ["agg", "apply"])
895
+ def test_apply_dictlike_reducer(string_series, ops, how):
896
+ # GH 39140
897
+ expected = Series({name: op(string_series) for name, op in ops.items()})
898
+ expected.name = string_series.name
899
+ result = getattr(string_series, how)(ops)
900
+ tm.assert_series_equal(result, expected)
901
+
902
+
903
+ @pytest.mark.parametrize(
904
+ "ops, names",
905
+ [
906
+ ([np.sqrt], ["sqrt"]),
907
+ ([np.abs, np.sqrt], ["absolute", "sqrt"]),
908
+ (np.array([np.sqrt]), ["sqrt"]),
909
+ (np.array([np.abs, np.sqrt]), ["absolute", "sqrt"]),
910
+ ],
911
+ )
912
+ def test_apply_listlike_transformer(string_series, ops, names):
913
+ # GH 39140
914
+ with np.errstate(all="ignore"):
915
+ expected = concat([op(string_series) for op in ops], axis=1)
916
+ expected.columns = names
917
+ result = string_series.apply(ops)
918
+ tm.assert_frame_equal(result, expected)
919
+
920
+
921
+ @pytest.mark.parametrize(
922
+ "ops",
923
+ [
924
+ {"A": np.sqrt},
925
+ {"A": np.sqrt, "B": np.exp},
926
+ Series({"A": np.sqrt}),
927
+ Series({"A": np.sqrt, "B": np.exp}),
928
+ ],
929
+ )
930
+ def test_apply_dictlike_transformer(string_series, ops):
931
+ # GH 39140
932
+ with np.errstate(all="ignore"):
933
+ expected = concat({name: op(string_series) for name, op in ops.items()})
934
+ expected.name = string_series.name
935
+ result = string_series.apply(ops)
936
+ tm.assert_series_equal(result, expected)
937
+
938
+
939
+ def test_apply_retains_column_name():
940
+ # GH 16380
941
+ df = DataFrame({"x": range(3)}, Index(range(3), name="x"))
942
+ result = df.x.apply(lambda x: Series(range(x + 1), Index(range(x + 1), name="y")))
943
+ expected = DataFrame(
944
+ [[0.0, np.nan, np.nan], [0.0, 1.0, np.nan], [0.0, 1.0, 2.0]],
945
+ columns=Index(range(3), name="y"),
946
+ index=Index(range(3), name="x"),
947
+ )
948
+ tm.assert_frame_equal(result, expected)
949
+
950
+
951
+ def test_apply_type():
952
+ # GH 46719
953
+ s = Series([3, "string", float], index=["a", "b", "c"])
954
+ result = s.apply(type)
955
+ expected = Series([int, str, type], index=["a", "b", "c"])
956
+ tm.assert_series_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_series_apply_relabeling.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import pandas._testing as tm
3
+
4
+
5
+ def test_relabel_no_duplicated_method():
6
+ # this is to test there is no duplicated method used in agg
7
+ df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4]})
8
+
9
+ result = df["A"].agg(foo="sum")
10
+ expected = df["A"].agg({"foo": "sum"})
11
+ tm.assert_series_equal(result, expected)
12
+
13
+ result = df["B"].agg(foo="min", bar="max")
14
+ expected = df["B"].agg({"foo": "min", "bar": "max"})
15
+ tm.assert_series_equal(result, expected)
16
+
17
+ result = df["B"].agg(foo=sum, bar=min, cat="max")
18
+ expected = df["B"].agg({"foo": sum, "bar": min, "cat": "max"})
19
+ tm.assert_series_equal(result, expected)
20
+
21
+
22
+ def test_relabel_duplicated_method():
23
+ # this is to test with nested renaming, duplicated method can be used
24
+ # if they are assigned with different new names
25
+ df = pd.DataFrame({"A": [1, 2, 1, 2], "B": [1, 2, 3, 4]})
26
+
27
+ result = df["A"].agg(foo="sum", bar="sum")
28
+ expected = pd.Series([6, 6], index=["foo", "bar"], name="A")
29
+ tm.assert_series_equal(result, expected)
30
+
31
+ result = df["B"].agg(foo=min, bar="min")
32
+ expected = pd.Series([1, 1], index=["foo", "bar"], name="B")
33
+ tm.assert_series_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_series_transform.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DataFrame,
6
+ MultiIndex,
7
+ Series,
8
+ concat,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ @pytest.mark.parametrize(
14
+ "ops, names",
15
+ [
16
+ ([np.sqrt], ["sqrt"]),
17
+ ([np.abs, np.sqrt], ["absolute", "sqrt"]),
18
+ (np.array([np.sqrt]), ["sqrt"]),
19
+ (np.array([np.abs, np.sqrt]), ["absolute", "sqrt"]),
20
+ ],
21
+ )
22
+ def test_transform_listlike(string_series, ops, names):
23
+ # GH 35964
24
+ with np.errstate(all="ignore"):
25
+ expected = concat([op(string_series) for op in ops], axis=1)
26
+ expected.columns = names
27
+ result = string_series.transform(ops)
28
+ tm.assert_frame_equal(result, expected)
29
+
30
+
31
+ @pytest.mark.parametrize("box", [dict, Series])
32
+ def test_transform_dictlike(string_series, box):
33
+ # GH 35964
34
+ with np.errstate(all="ignore"):
35
+ expected = concat([np.sqrt(string_series), np.abs(string_series)], axis=1)
36
+ expected.columns = ["foo", "bar"]
37
+ result = string_series.transform(box({"foo": np.sqrt, "bar": np.abs}))
38
+ tm.assert_frame_equal(result, expected)
39
+
40
+
41
+ def test_transform_dictlike_mixed():
42
+ # GH 40018 - mix of lists and non-lists in values of a dictionary
43
+ df = Series([1, 4])
44
+ result = df.transform({"b": ["sqrt", "abs"], "c": "sqrt"})
45
+ expected = DataFrame(
46
+ [[1.0, 1, 1.0], [2.0, 4, 2.0]],
47
+ columns=MultiIndex([("b", "c"), ("sqrt", "abs")], [(0, 0, 1), (0, 1, 0)]),
48
+ )
49
+ tm.assert_frame_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/apply/test_str.py ADDED
@@ -0,0 +1,297 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from itertools import chain
2
+ import operator
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas.core.dtypes.common import is_number
8
+
9
+ from pandas import (
10
+ DataFrame,
11
+ Series,
12
+ )
13
+ import pandas._testing as tm
14
+ from pandas.tests.apply.common import (
15
+ frame_transform_kernels,
16
+ series_transform_kernels,
17
+ )
18
+
19
+
20
+ @pytest.mark.parametrize("func", ["sum", "mean", "min", "max", "std"])
21
+ @pytest.mark.parametrize(
22
+ "args,kwds",
23
+ [
24
+ pytest.param([], {}, id="no_args_or_kwds"),
25
+ pytest.param([1], {}, id="axis_from_args"),
26
+ pytest.param([], {"axis": 1}, id="axis_from_kwds"),
27
+ pytest.param([], {"numeric_only": True}, id="optional_kwds"),
28
+ pytest.param([1, True], {"numeric_only": True}, id="args_and_kwds"),
29
+ ],
30
+ )
31
+ @pytest.mark.parametrize("how", ["agg", "apply"])
32
+ def test_apply_with_string_funcs(request, float_frame, func, args, kwds, how):
33
+ if len(args) > 1 and how == "agg":
34
+ request.node.add_marker(
35
+ pytest.mark.xfail(
36
+ raises=TypeError,
37
+ reason="agg/apply signature mismatch - agg passes 2nd "
38
+ "argument to func",
39
+ )
40
+ )
41
+ result = getattr(float_frame, how)(func, *args, **kwds)
42
+ expected = getattr(float_frame, func)(*args, **kwds)
43
+ tm.assert_series_equal(result, expected)
44
+
45
+
46
+ @pytest.mark.parametrize("arg", ["sum", "mean", "min", "max", "std"])
47
+ def test_with_string_args(datetime_series, arg):
48
+ result = datetime_series.apply(arg)
49
+ expected = getattr(datetime_series, arg)()
50
+ assert result == expected
51
+
52
+
53
+ @pytest.mark.parametrize("op", ["mean", "median", "std", "var"])
54
+ @pytest.mark.parametrize("how", ["agg", "apply"])
55
+ def test_apply_np_reducer(op, how):
56
+ # GH 39116
57
+ float_frame = DataFrame({"a": [1, 2], "b": [3, 4]})
58
+ result = getattr(float_frame, how)(op)
59
+ # pandas ddof defaults to 1, numpy to 0
60
+ kwargs = {"ddof": 1} if op in ("std", "var") else {}
61
+ expected = Series(
62
+ getattr(np, op)(float_frame, axis=0, **kwargs), index=float_frame.columns
63
+ )
64
+ tm.assert_series_equal(result, expected)
65
+
66
+
67
+ @pytest.mark.parametrize(
68
+ "op", ["abs", "ceil", "cos", "cumsum", "exp", "log", "sqrt", "square"]
69
+ )
70
+ @pytest.mark.parametrize("how", ["transform", "apply"])
71
+ def test_apply_np_transformer(float_frame, op, how):
72
+ # GH 39116
73
+
74
+ # float_frame will _usually_ have negative values, which will
75
+ # trigger the warning here, but let's put one in just to be sure
76
+ float_frame.iloc[0, 0] = -1.0
77
+ warn = None
78
+ if op in ["log", "sqrt"]:
79
+ warn = RuntimeWarning
80
+
81
+ with tm.assert_produces_warning(warn, check_stacklevel=False):
82
+ # float_frame fixture is defined in conftest.py, so we don't check the
83
+ # stacklevel as otherwise the test would fail.
84
+ result = getattr(float_frame, how)(op)
85
+ expected = getattr(np, op)(float_frame)
86
+ tm.assert_frame_equal(result, expected)
87
+
88
+
89
+ @pytest.mark.parametrize(
90
+ "series, func, expected",
91
+ chain(
92
+ tm.get_cython_table_params(
93
+ Series(dtype=np.float64),
94
+ [
95
+ ("sum", 0),
96
+ ("max", np.nan),
97
+ ("min", np.nan),
98
+ ("all", True),
99
+ ("any", False),
100
+ ("mean", np.nan),
101
+ ("prod", 1),
102
+ ("std", np.nan),
103
+ ("var", np.nan),
104
+ ("median", np.nan),
105
+ ],
106
+ ),
107
+ tm.get_cython_table_params(
108
+ Series([np.nan, 1, 2, 3]),
109
+ [
110
+ ("sum", 6),
111
+ ("max", 3),
112
+ ("min", 1),
113
+ ("all", True),
114
+ ("any", True),
115
+ ("mean", 2),
116
+ ("prod", 6),
117
+ ("std", 1),
118
+ ("var", 1),
119
+ ("median", 2),
120
+ ],
121
+ ),
122
+ tm.get_cython_table_params(
123
+ Series("a b c".split()),
124
+ [
125
+ ("sum", "abc"),
126
+ ("max", "c"),
127
+ ("min", "a"),
128
+ ("all", True),
129
+ ("any", True),
130
+ ],
131
+ ),
132
+ ),
133
+ )
134
+ def test_agg_cython_table_series(series, func, expected):
135
+ # GH21224
136
+ # test reducing functions in
137
+ # pandas.core.base.SelectionMixin._cython_table
138
+ result = series.agg(func)
139
+ if is_number(expected):
140
+ assert np.isclose(result, expected, equal_nan=True)
141
+ else:
142
+ assert result == expected
143
+
144
+
145
+ @pytest.mark.parametrize(
146
+ "series, func, expected",
147
+ chain(
148
+ tm.get_cython_table_params(
149
+ Series(dtype=np.float64),
150
+ [
151
+ ("cumprod", Series([], dtype=np.float64)),
152
+ ("cumsum", Series([], dtype=np.float64)),
153
+ ],
154
+ ),
155
+ tm.get_cython_table_params(
156
+ Series([np.nan, 1, 2, 3]),
157
+ [
158
+ ("cumprod", Series([np.nan, 1, 2, 6])),
159
+ ("cumsum", Series([np.nan, 1, 3, 6])),
160
+ ],
161
+ ),
162
+ tm.get_cython_table_params(
163
+ Series("a b c".split()), [("cumsum", Series(["a", "ab", "abc"]))]
164
+ ),
165
+ ),
166
+ )
167
+ def test_agg_cython_table_transform_series(series, func, expected):
168
+ # GH21224
169
+ # test transforming functions in
170
+ # pandas.core.base.SelectionMixin._cython_table (cumprod, cumsum)
171
+ result = series.agg(func)
172
+ tm.assert_series_equal(result, expected)
173
+
174
+
175
+ @pytest.mark.parametrize(
176
+ "df, func, expected",
177
+ chain(
178
+ tm.get_cython_table_params(
179
+ DataFrame(),
180
+ [
181
+ ("sum", Series(dtype="float64")),
182
+ ("max", Series(dtype="float64")),
183
+ ("min", Series(dtype="float64")),
184
+ ("all", Series(dtype=bool)),
185
+ ("any", Series(dtype=bool)),
186
+ ("mean", Series(dtype="float64")),
187
+ ("prod", Series(dtype="float64")),
188
+ ("std", Series(dtype="float64")),
189
+ ("var", Series(dtype="float64")),
190
+ ("median", Series(dtype="float64")),
191
+ ],
192
+ ),
193
+ tm.get_cython_table_params(
194
+ DataFrame([[np.nan, 1], [1, 2]]),
195
+ [
196
+ ("sum", Series([1.0, 3])),
197
+ ("max", Series([1.0, 2])),
198
+ ("min", Series([1.0, 1])),
199
+ ("all", Series([True, True])),
200
+ ("any", Series([True, True])),
201
+ ("mean", Series([1, 1.5])),
202
+ ("prod", Series([1.0, 2])),
203
+ ("std", Series([np.nan, 0.707107])),
204
+ ("var", Series([np.nan, 0.5])),
205
+ ("median", Series([1, 1.5])),
206
+ ],
207
+ ),
208
+ ),
209
+ )
210
+ def test_agg_cython_table_frame(df, func, expected, axis):
211
+ # GH 21224
212
+ # test reducing functions in
213
+ # pandas.core.base.SelectionMixin._cython_table
214
+ result = df.agg(func, axis=axis)
215
+ tm.assert_series_equal(result, expected)
216
+
217
+
218
+ @pytest.mark.parametrize(
219
+ "df, func, expected",
220
+ chain(
221
+ tm.get_cython_table_params(
222
+ DataFrame(), [("cumprod", DataFrame()), ("cumsum", DataFrame())]
223
+ ),
224
+ tm.get_cython_table_params(
225
+ DataFrame([[np.nan, 1], [1, 2]]),
226
+ [
227
+ ("cumprod", DataFrame([[np.nan, 1], [1, 2]])),
228
+ ("cumsum", DataFrame([[np.nan, 1], [1, 3]])),
229
+ ],
230
+ ),
231
+ ),
232
+ )
233
+ def test_agg_cython_table_transform_frame(df, func, expected, axis):
234
+ # GH 21224
235
+ # test transforming functions in
236
+ # pandas.core.base.SelectionMixin._cython_table (cumprod, cumsum)
237
+ if axis in ("columns", 1):
238
+ # operating blockwise doesn't let us preserve dtypes
239
+ expected = expected.astype("float64")
240
+
241
+ result = df.agg(func, axis=axis)
242
+ tm.assert_frame_equal(result, expected)
243
+
244
+
245
+ @pytest.mark.parametrize("op", series_transform_kernels)
246
+ def test_transform_groupby_kernel_series(request, string_series, op):
247
+ # GH 35964
248
+ if op == "ngroup":
249
+ request.node.add_marker(
250
+ pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
251
+ )
252
+ args = [0.0] if op == "fillna" else []
253
+ ones = np.ones(string_series.shape[0])
254
+ expected = string_series.groupby(ones).transform(op, *args)
255
+ result = string_series.transform(op, 0, *args)
256
+ tm.assert_series_equal(result, expected)
257
+
258
+
259
+ @pytest.mark.parametrize("op", frame_transform_kernels)
260
+ def test_transform_groupby_kernel_frame(request, axis, float_frame, op):
261
+ if op == "ngroup":
262
+ request.node.add_marker(
263
+ pytest.mark.xfail(raises=ValueError, reason="ngroup not valid for NDFrame")
264
+ )
265
+
266
+ # GH 35964
267
+
268
+ args = [0.0] if op == "fillna" else []
269
+ if axis in (0, "index"):
270
+ ones = np.ones(float_frame.shape[0])
271
+ else:
272
+ ones = np.ones(float_frame.shape[1])
273
+ expected = float_frame.groupby(ones, axis=axis).transform(op, *args)
274
+ result = float_frame.transform(op, axis, *args)
275
+ tm.assert_frame_equal(result, expected)
276
+
277
+ # same thing, but ensuring we have multiple blocks
278
+ assert "E" not in float_frame.columns
279
+ float_frame["E"] = float_frame["A"].copy()
280
+ assert len(float_frame._mgr.arrays) > 1
281
+
282
+ if axis in (0, "index"):
283
+ ones = np.ones(float_frame.shape[0])
284
+ else:
285
+ ones = np.ones(float_frame.shape[1])
286
+ expected2 = float_frame.groupby(ones, axis=axis).transform(op, *args)
287
+ result2 = float_frame.transform(op, axis, *args)
288
+ tm.assert_frame_equal(result2, expected2)
289
+
290
+
291
+ @pytest.mark.parametrize("method", ["abs", "shift", "pct_change", "cumsum", "rank"])
292
+ def test_transform_method_name(method):
293
+ # GH 19760
294
+ df = DataFrame({"A": [-1, 2]})
295
+ result = df.transform(method)
296
+ expected = operator.methodcaller(method)(df)
297
+ tm.assert_frame_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__init__.py ADDED
File without changes
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (179 Bytes). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/common.cpython-310.pyc ADDED
Binary file (3.81 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (5.57 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_array_ops.cpython-310.pyc ADDED
Binary file (1.17 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_categorical.cpython-310.pyc ADDED
Binary file (1.18 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_datetime64.cpython-310.pyc ADDED
Binary file (58.3 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_interval.cpython-310.pyc ADDED
Binary file (8.77 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_numeric.cpython-310.pyc ADDED
Binary file (42.2 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_object.cpython-310.pyc ADDED
Binary file (10.9 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_period.cpython-310.pyc ADDED
Binary file (44.6 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_timedelta64.cpython-310.pyc ADDED
Binary file (54.5 kB). View file
 
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/common.py ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Assertion helpers for arithmetic tests.
3
+ """
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas import (
8
+ DataFrame,
9
+ Index,
10
+ Series,
11
+ array,
12
+ )
13
+ import pandas._testing as tm
14
+ from pandas.core.arrays import (
15
+ BooleanArray,
16
+ PandasArray,
17
+ )
18
+
19
+
20
+ def assert_cannot_add(left, right, msg="cannot add"):
21
+ """
22
+ Helper to assert that left and right cannot be added.
23
+
24
+ Parameters
25
+ ----------
26
+ left : object
27
+ right : object
28
+ msg : str, default "cannot add"
29
+ """
30
+ with pytest.raises(TypeError, match=msg):
31
+ left + right
32
+ with pytest.raises(TypeError, match=msg):
33
+ right + left
34
+
35
+
36
+ def assert_invalid_addsub_type(left, right, msg=None):
37
+ """
38
+ Helper to assert that left and right can be neither added nor subtracted.
39
+
40
+ Parameters
41
+ ----------
42
+ left : object
43
+ right : object
44
+ msg : str or None, default None
45
+ """
46
+ with pytest.raises(TypeError, match=msg):
47
+ left + right
48
+ with pytest.raises(TypeError, match=msg):
49
+ right + left
50
+ with pytest.raises(TypeError, match=msg):
51
+ left - right
52
+ with pytest.raises(TypeError, match=msg):
53
+ right - left
54
+
55
+
56
+ def get_upcast_box(left, right, is_cmp: bool = False):
57
+ """
58
+ Get the box to use for 'expected' in an arithmetic or comparison operation.
59
+
60
+ Parameters
61
+ left : Any
62
+ right : Any
63
+ is_cmp : bool, default False
64
+ Whether the operation is a comparison method.
65
+ """
66
+
67
+ if isinstance(left, DataFrame) or isinstance(right, DataFrame):
68
+ return DataFrame
69
+ if isinstance(left, Series) or isinstance(right, Series):
70
+ if is_cmp and isinstance(left, Index):
71
+ # Index does not defer for comparisons
72
+ return np.array
73
+ return Series
74
+ if isinstance(left, Index) or isinstance(right, Index):
75
+ if is_cmp:
76
+ return np.array
77
+ return Index
78
+ return tm.to_array
79
+
80
+
81
+ def assert_invalid_comparison(left, right, box):
82
+ """
83
+ Assert that comparison operations with mismatched types behave correctly.
84
+
85
+ Parameters
86
+ ----------
87
+ left : np.ndarray, ExtensionArray, Index, or Series
88
+ right : object
89
+ box : {pd.DataFrame, pd.Series, pd.Index, pd.array, tm.to_array}
90
+ """
91
+ # Not for tznaive-tzaware comparison
92
+
93
+ # Note: not quite the same as how we do this for tm.box_expected
94
+ xbox = box if box not in [Index, array] else np.array
95
+
96
+ def xbox2(x):
97
+ # Eventually we'd like this to be tighter, but for now we'll
98
+ # just exclude PandasArray[bool]
99
+ if isinstance(x, PandasArray):
100
+ return x._ndarray
101
+ if isinstance(x, BooleanArray):
102
+ # NB: we are assuming no pd.NAs for now
103
+ return x.astype(bool)
104
+ return x
105
+
106
+ # rev_box: box to use for reversed comparisons
107
+ rev_box = xbox
108
+ if isinstance(right, Index) and isinstance(left, Series):
109
+ rev_box = np.array
110
+
111
+ result = xbox2(left == right)
112
+ expected = xbox(np.zeros(result.shape, dtype=np.bool_))
113
+
114
+ tm.assert_equal(result, expected)
115
+
116
+ result = xbox2(right == left)
117
+ tm.assert_equal(result, rev_box(expected))
118
+
119
+ result = xbox2(left != right)
120
+ tm.assert_equal(result, ~expected)
121
+
122
+ result = xbox2(right != left)
123
+ tm.assert_equal(result, rev_box(~expected))
124
+
125
+ msg = "|".join(
126
+ [
127
+ "Invalid comparison between",
128
+ "Cannot compare type",
129
+ "not supported between",
130
+ "invalid type promotion",
131
+ (
132
+ # GH#36706 npdev 1.20.0 2020-09-28
133
+ r"The DTypes <class 'numpy.dtype\[datetime64\]'> and "
134
+ r"<class 'numpy.dtype\[int64\]'> do not have a common DType. "
135
+ "For example they cannot be stored in a single array unless the "
136
+ "dtype is `object`."
137
+ ),
138
+ ]
139
+ )
140
+ with pytest.raises(TypeError, match=msg):
141
+ left < right
142
+ with pytest.raises(TypeError, match=msg):
143
+ left <= right
144
+ with pytest.raises(TypeError, match=msg):
145
+ left > right
146
+ with pytest.raises(TypeError, match=msg):
147
+ left >= right
148
+ with pytest.raises(TypeError, match=msg):
149
+ right < left
150
+ with pytest.raises(TypeError, match=msg):
151
+ right <= left
152
+ with pytest.raises(TypeError, match=msg):
153
+ right > left
154
+ with pytest.raises(TypeError, match=msg):
155
+ right >= left
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/conftest.py ADDED
@@ -0,0 +1,228 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ Index,
7
+ RangeIndex,
8
+ )
9
+ import pandas._testing as tm
10
+ from pandas.core.computation import expressions as expr
11
+
12
+
13
+ @pytest.fixture(autouse=True, params=[0, 1000000], ids=["numexpr", "python"])
14
+ def switch_numexpr_min_elements(request):
15
+ _MIN_ELEMENTS = expr._MIN_ELEMENTS
16
+ expr._MIN_ELEMENTS = request.param
17
+ yield request.param
18
+ expr._MIN_ELEMENTS = _MIN_ELEMENTS
19
+
20
+
21
+ # ------------------------------------------------------------------
22
+
23
+
24
+ # doctest with +SKIP for one fixture fails during setup with
25
+ # 'DoctestItem' object has no attribute 'callspec'
26
+ # due to switch_numexpr_min_elements fixture
27
+ @pytest.fixture(params=[1, np.array(1, dtype=np.int64)])
28
+ def one(request):
29
+ """
30
+ Several variants of integer value 1. The zero-dim integer array
31
+ behaves like an integer.
32
+
33
+ This fixture can be used to check that datetimelike indexes handle
34
+ addition and subtraction of integers and zero-dimensional arrays
35
+ of integers.
36
+
37
+ Examples
38
+ --------
39
+ dti = pd.date_range('2016-01-01', periods=2, freq='H')
40
+ dti
41
+ DatetimeIndex(['2016-01-01 00:00:00', '2016-01-01 01:00:00'],
42
+ dtype='datetime64[ns]', freq='H')
43
+ dti + one
44
+ DatetimeIndex(['2016-01-01 01:00:00', '2016-01-01 02:00:00'],
45
+ dtype='datetime64[ns]', freq='H')
46
+ """
47
+ return request.param
48
+
49
+
50
+ zeros = [
51
+ box_cls([0] * 5, dtype=dtype)
52
+ for box_cls in [Index, np.array, pd.array]
53
+ for dtype in [np.int64, np.uint64, np.float64]
54
+ ]
55
+ zeros.extend([box_cls([-0.0] * 5, dtype=np.float64) for box_cls in [Index, np.array]])
56
+ zeros.extend([np.array(0, dtype=dtype) for dtype in [np.int64, np.uint64, np.float64]])
57
+ zeros.extend([np.array(-0.0, dtype=np.float64)])
58
+ zeros.extend([0, 0.0, -0.0])
59
+
60
+
61
+ # doctest with +SKIP for zero fixture fails during setup with
62
+ # 'DoctestItem' object has no attribute 'callspec'
63
+ # due to switch_numexpr_min_elements fixture
64
+ @pytest.fixture(params=zeros)
65
+ def zero(request):
66
+ """
67
+ Several types of scalar zeros and length 5 vectors of zeros.
68
+
69
+ This fixture can be used to check that numeric-dtype indexes handle
70
+ division by any zero numeric-dtype.
71
+
72
+ Uses vector of length 5 for broadcasting with `numeric_idx` fixture,
73
+ which creates numeric-dtype vectors also of length 5.
74
+
75
+ Examples
76
+ --------
77
+ arr = RangeIndex(5)
78
+ arr / zeros
79
+ Index([nan, inf, inf, inf, inf], dtype='float64')
80
+ """
81
+ return request.param
82
+
83
+
84
+ # ------------------------------------------------------------------
85
+ # Vector Fixtures
86
+
87
+
88
+ @pytest.fixture(
89
+ params=[
90
+ # TODO: add more dtypes here
91
+ Index(np.arange(5, dtype="float64")),
92
+ Index(np.arange(5, dtype="int64")),
93
+ Index(np.arange(5, dtype="uint64")),
94
+ RangeIndex(5),
95
+ ],
96
+ ids=lambda x: type(x).__name__,
97
+ )
98
+ def numeric_idx(request):
99
+ """
100
+ Several types of numeric-dtypes Index objects
101
+ """
102
+ return request.param
103
+
104
+
105
+ # ------------------------------------------------------------------
106
+ # Scalar Fixtures
107
+
108
+
109
+ @pytest.fixture(
110
+ params=[
111
+ pd.Timedelta("10m7s").to_pytimedelta(),
112
+ pd.Timedelta("10m7s"),
113
+ pd.Timedelta("10m7s").to_timedelta64(),
114
+ ],
115
+ ids=lambda x: type(x).__name__,
116
+ )
117
+ def scalar_td(request):
118
+ """
119
+ Several variants of Timedelta scalars representing 10 minutes and 7 seconds.
120
+ """
121
+ return request.param
122
+
123
+
124
+ @pytest.fixture(
125
+ params=[
126
+ pd.offsets.Day(3),
127
+ pd.offsets.Hour(72),
128
+ pd.Timedelta(days=3).to_pytimedelta(),
129
+ pd.Timedelta("72:00:00"),
130
+ np.timedelta64(3, "D"),
131
+ np.timedelta64(72, "h"),
132
+ ],
133
+ ids=lambda x: type(x).__name__,
134
+ )
135
+ def three_days(request):
136
+ """
137
+ Several timedelta-like and DateOffset objects that each represent
138
+ a 3-day timedelta
139
+ """
140
+ return request.param
141
+
142
+
143
+ @pytest.fixture(
144
+ params=[
145
+ pd.offsets.Hour(2),
146
+ pd.offsets.Minute(120),
147
+ pd.Timedelta(hours=2).to_pytimedelta(),
148
+ pd.Timedelta(seconds=2 * 3600),
149
+ np.timedelta64(2, "h"),
150
+ np.timedelta64(120, "m"),
151
+ ],
152
+ ids=lambda x: type(x).__name__,
153
+ )
154
+ def two_hours(request):
155
+ """
156
+ Several timedelta-like and DateOffset objects that each represent
157
+ a 2-hour timedelta
158
+ """
159
+ return request.param
160
+
161
+
162
+ _common_mismatch = [
163
+ pd.offsets.YearBegin(2),
164
+ pd.offsets.MonthBegin(1),
165
+ pd.offsets.Minute(),
166
+ ]
167
+
168
+
169
+ @pytest.fixture(
170
+ params=[
171
+ pd.Timedelta(minutes=30).to_pytimedelta(),
172
+ np.timedelta64(30, "s"),
173
+ pd.Timedelta(seconds=30),
174
+ ]
175
+ + _common_mismatch
176
+ )
177
+ def not_hourly(request):
178
+ """
179
+ Several timedelta-like and DateOffset instances that are _not_
180
+ compatible with Hourly frequencies.
181
+ """
182
+ return request.param
183
+
184
+
185
+ @pytest.fixture(
186
+ params=[
187
+ np.timedelta64(4, "h"),
188
+ pd.Timedelta(hours=23).to_pytimedelta(),
189
+ pd.Timedelta("23:00:00"),
190
+ ]
191
+ + _common_mismatch
192
+ )
193
+ def not_daily(request):
194
+ """
195
+ Several timedelta-like and DateOffset instances that are _not_
196
+ compatible with Daily frequencies.
197
+ """
198
+ return request.param
199
+
200
+
201
+ @pytest.fixture(
202
+ params=[
203
+ np.timedelta64(365, "D"),
204
+ pd.Timedelta(days=365).to_pytimedelta(),
205
+ pd.Timedelta(days=365),
206
+ ]
207
+ + _common_mismatch
208
+ )
209
+ def mismatched_freq(request):
210
+ """
211
+ Several timedelta-like and DateOffset instances that are _not_
212
+ compatible with Monthly or Annual frequencies.
213
+ """
214
+ return request.param
215
+
216
+
217
+ # ------------------------------------------------------------------
218
+
219
+
220
+ @pytest.fixture(
221
+ params=[Index, pd.Series, tm.to_array, np.array, list], ids=lambda x: x.__name__
222
+ )
223
+ def box_1d_array(request):
224
+ """
225
+ Fixture to test behavior for Index, Series, tm.to_array, numpy Array and list
226
+ classes
227
+ """
228
+ return request.param
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_array_ops.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import operator
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas._testing as tm
7
+ from pandas.core.ops.array_ops import (
8
+ comparison_op,
9
+ na_logical_op,
10
+ )
11
+
12
+
13
+ def test_na_logical_op_2d():
14
+ left = np.arange(8).reshape(4, 2)
15
+ right = left.astype(object)
16
+ right[0, 0] = np.nan
17
+
18
+ # Check that we fall back to the vec_binop branch
19
+ with pytest.raises(TypeError, match="unsupported operand type"):
20
+ operator.or_(left, right)
21
+
22
+ result = na_logical_op(left, right, operator.or_)
23
+ expected = right
24
+ tm.assert_numpy_array_equal(result, expected)
25
+
26
+
27
+ def test_object_comparison_2d():
28
+ left = np.arange(9).reshape(3, 3).astype(object)
29
+ right = left.T
30
+
31
+ result = comparison_op(left, right, operator.eq)
32
+ expected = np.eye(3).astype(bool)
33
+ tm.assert_numpy_array_equal(result, expected)
34
+
35
+ # Ensure that cython doesn't raise on non-writeable arg, which
36
+ # we can get from np.broadcast_to
37
+ right.flags.writeable = False
38
+ result = comparison_op(left, right, operator.ne)
39
+ tm.assert_numpy_array_equal(result, ~expected)
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_categorical.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas import (
4
+ Categorical,
5
+ Series,
6
+ )
7
+ import pandas._testing as tm
8
+
9
+
10
+ class TestCategoricalComparisons:
11
+ def test_categorical_nan_equality(self):
12
+ cat = Series(Categorical(["a", "b", "c", np.nan]))
13
+ expected = Series([True, True, True, False])
14
+ result = cat == cat
15
+ tm.assert_series_equal(result, expected)
16
+
17
+ def test_categorical_tuple_equality(self):
18
+ # GH 18050
19
+ ser = Series([(0, 0), (0, 1), (0, 0), (1, 0), (1, 1)])
20
+ expected = Series([True, False, True, False, False])
21
+ result = ser == (0, 0)
22
+ tm.assert_series_equal(result, expected)
23
+
24
+ result = ser.astype("category") == (0, 0)
25
+ tm.assert_series_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_datetime64.py ADDED
@@ -0,0 +1,2475 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Arithmetic tests for DataFrame/Series/Index/Array classes that should
2
+ # behave identically.
3
+ # Specifically for datetime64 and datetime64tz dtypes
4
+ from datetime import (
5
+ datetime,
6
+ time,
7
+ timedelta,
8
+ )
9
+ from itertools import (
10
+ product,
11
+ starmap,
12
+ )
13
+ import operator
14
+ import warnings
15
+
16
+ import numpy as np
17
+ import pytest
18
+ import pytz
19
+
20
+ from pandas._libs.tslibs.conversion import localize_pydatetime
21
+ from pandas._libs.tslibs.offsets import shift_months
22
+ from pandas.errors import PerformanceWarning
23
+
24
+ import pandas as pd
25
+ from pandas import (
26
+ DateOffset,
27
+ DatetimeIndex,
28
+ NaT,
29
+ Period,
30
+ Series,
31
+ Timedelta,
32
+ TimedeltaIndex,
33
+ Timestamp,
34
+ date_range,
35
+ )
36
+ import pandas._testing as tm
37
+ from pandas.core.ops import roperator
38
+ from pandas.tests.arithmetic.common import (
39
+ assert_cannot_add,
40
+ assert_invalid_addsub_type,
41
+ assert_invalid_comparison,
42
+ get_upcast_box,
43
+ )
44
+
45
+ # ------------------------------------------------------------------
46
+ # Comparisons
47
+
48
+
49
class TestDatetime64ArrayLikeComparisons:
    """Comparison tests for datetime64 vectors, parametrized over
    DataFrame/Series/DatetimeIndex/DatetimeArray via ``box_with_array``.

    Ideally all datetime64 comparison tests will eventually end up here.
    """

    def test_compare_zerodim(self, tz_naive_fixture, box_with_array):
        # Test comparison with zero-dimensional array is unboxed
        tz = tz_naive_fixture
        box = box_with_array
        dti = date_range("20130101", periods=3, tz=tz)

        # 0-dim datetime64 scalar wrapped in an ndarray
        other = np.array(dti.to_numpy()[0])

        dtarr = tm.box_expected(dti, box)
        xbox = get_upcast_box(dtarr, other, True)
        result = dtarr <= other
        expected = np.array([True, False, False])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(result, expected)

    @pytest.mark.parametrize(
        "other",
        [
            "foo",
            -1,
            99,
            4.0,
            object(),
            timedelta(days=2),
            # GH#19800, GH#19301 datetime.date comparison raises to
            # match DatetimeIndex/Timestamp. This also matches the behavior
            # of stdlib datetime.datetime
            datetime(2001, 1, 1).date(),
            # GH#19301 None and NaN are *not* cast to NaT for comparisons
            None,
            np.nan,
        ],
    )
    def test_dt64arr_cmp_scalar_invalid(self, other, tz_naive_fixture, box_with_array):
        # GH#22074, GH#15966
        tz = tz_naive_fixture

        rng = date_range("1/1/2000", periods=10, tz=tz)
        dtarr = tm.box_expected(rng, box_with_array)
        assert_invalid_comparison(dtarr, other, box_with_array)

    @pytest.mark.parametrize(
        "other",
        [
            # GH#4968 invalid date/int comparisons
            list(range(10)),
            np.arange(10),
            np.arange(10).astype(np.float32),
            np.arange(10).astype(object),
            pd.timedelta_range("1ns", periods=10).array,
            np.array(pd.timedelta_range("1ns", periods=10)),
            list(pd.timedelta_range("1ns", periods=10)),
            pd.timedelta_range("1 Day", periods=10).astype(object),
            pd.period_range("1971-01-01", freq="D", periods=10).array,
            pd.period_range("1971-01-01", freq="D", periods=10).astype(object),
        ],
    )
    def test_dt64arr_cmp_arraylike_invalid(
        self, other, tz_naive_fixture, box_with_array
    ):
        tz = tz_naive_fixture

        dta = date_range("1970-01-01", freq="ns", periods=10, tz=tz)._data
        obj = tm.box_expected(dta, box_with_array)
        assert_invalid_comparison(obj, other, box_with_array)

    def test_dt64arr_cmp_mixed_invalid(self, tz_naive_fixture):
        # Mixed object-dtype other: equality works elementwise, ordering raises.
        tz = tz_naive_fixture

        dta = date_range("1970-01-01", freq="h", periods=5, tz=tz)._data

        other = np.array([0, 1, 2, dta[3], Timedelta(days=1)])
        result = dta == other
        expected = np.array([False, False, False, True, False])
        tm.assert_numpy_array_equal(result, expected)

        result = dta != other
        tm.assert_numpy_array_equal(result, ~expected)

        msg = "Invalid comparison between|Cannot compare type|not supported between"
        with pytest.raises(TypeError, match=msg):
            dta < other
        with pytest.raises(TypeError, match=msg):
            dta > other
        with pytest.raises(TypeError, match=msg):
            dta <= other
        with pytest.raises(TypeError, match=msg):
            dta >= other

    def test_dt64arr_nat_comparison(self, tz_naive_fixture, box_with_array):
        # GH#22242, GH#22163 DataFrame considered NaT == ts incorrectly
        tz = tz_naive_fixture
        box = box_with_array

        ts = Timestamp("2021-01-01", tz=tz)
        ser = Series([ts, NaT])

        obj = tm.box_expected(ser, box)
        xbox = get_upcast_box(obj, ts, True)

        expected = Series([True, False], dtype=np.bool_)
        expected = tm.box_expected(expected, xbox)

        result = obj == ts
        tm.assert_equal(result, expected)
+
160
+
161
+ class TestDatetime64SeriesComparison:
162
+ # TODO: moved from tests.series.test_operators; needs cleanup
163
+
164
+ @pytest.mark.parametrize(
165
+ "pair",
166
+ [
167
+ (
168
+ [Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
169
+ [NaT, NaT, Timestamp("2011-01-03")],
170
+ ),
171
+ (
172
+ [Timedelta("1 days"), NaT, Timedelta("3 days")],
173
+ [NaT, NaT, Timedelta("3 days")],
174
+ ),
175
+ (
176
+ [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
177
+ [NaT, NaT, Period("2011-03", freq="M")],
178
+ ),
179
+ ],
180
+ )
181
+ @pytest.mark.parametrize("reverse", [True, False])
182
+ @pytest.mark.parametrize("dtype", [None, object])
183
+ @pytest.mark.parametrize(
184
+ "op, expected",
185
+ [
186
+ (operator.eq, Series([False, False, True])),
187
+ (operator.ne, Series([True, True, False])),
188
+ (operator.lt, Series([False, False, False])),
189
+ (operator.gt, Series([False, False, False])),
190
+ (operator.ge, Series([False, False, True])),
191
+ (operator.le, Series([False, False, True])),
192
+ ],
193
+ )
194
+ def test_nat_comparisons(
195
+ self,
196
+ dtype,
197
+ index_or_series,
198
+ reverse,
199
+ pair,
200
+ op,
201
+ expected,
202
+ ):
203
+ box = index_or_series
204
+ lhs, rhs = pair
205
+ if reverse:
206
+ # add lhs / rhs switched data
207
+ lhs, rhs = rhs, lhs
208
+
209
+ left = Series(lhs, dtype=dtype)
210
+ right = box(rhs, dtype=dtype)
211
+
212
+ result = op(left, right)
213
+
214
+ tm.assert_series_equal(result, expected)
215
+
216
+ @pytest.mark.parametrize(
217
+ "data",
218
+ [
219
+ [Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
220
+ [Timedelta("1 days"), NaT, Timedelta("3 days")],
221
+ [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
222
+ ],
223
+ )
224
+ @pytest.mark.parametrize("dtype", [None, object])
225
+ def test_nat_comparisons_scalar(self, dtype, data, box_with_array):
226
+ box = box_with_array
227
+
228
+ left = Series(data, dtype=dtype)
229
+ left = tm.box_expected(left, box)
230
+ xbox = get_upcast_box(left, NaT, True)
231
+
232
+ expected = [False, False, False]
233
+ expected = tm.box_expected(expected, xbox)
234
+ if box is pd.array and dtype is object:
235
+ expected = pd.array(expected, dtype="bool")
236
+
237
+ tm.assert_equal(left == NaT, expected)
238
+ tm.assert_equal(NaT == left, expected)
239
+
240
+ expected = [True, True, True]
241
+ expected = tm.box_expected(expected, xbox)
242
+ if box is pd.array and dtype is object:
243
+ expected = pd.array(expected, dtype="bool")
244
+ tm.assert_equal(left != NaT, expected)
245
+ tm.assert_equal(NaT != left, expected)
246
+
247
+ expected = [False, False, False]
248
+ expected = tm.box_expected(expected, xbox)
249
+ if box is pd.array and dtype is object:
250
+ expected = pd.array(expected, dtype="bool")
251
+ tm.assert_equal(left < NaT, expected)
252
+ tm.assert_equal(NaT > left, expected)
253
+ tm.assert_equal(left <= NaT, expected)
254
+ tm.assert_equal(NaT >= left, expected)
255
+
256
+ tm.assert_equal(left > NaT, expected)
257
+ tm.assert_equal(NaT < left, expected)
258
+ tm.assert_equal(left >= NaT, expected)
259
+ tm.assert_equal(NaT <= left, expected)
260
+
261
+ @pytest.mark.parametrize("val", [datetime(2000, 1, 4), datetime(2000, 1, 5)])
262
+ def test_series_comparison_scalars(self, val):
263
+ series = Series(date_range("1/1/2000", periods=10))
264
+
265
+ result = series > val
266
+ expected = Series([x > val for x in series])
267
+ tm.assert_series_equal(result, expected)
268
+
269
+ @pytest.mark.parametrize(
270
+ "left,right", [("lt", "gt"), ("le", "ge"), ("eq", "eq"), ("ne", "ne")]
271
+ )
272
+ def test_timestamp_compare_series(self, left, right):
273
+ # see gh-4982
274
+ # Make sure we can compare Timestamps on the right AND left hand side.
275
+ ser = Series(date_range("20010101", periods=10), name="dates")
276
+ s_nat = ser.copy(deep=True)
277
+
278
+ ser[0] = Timestamp("nat")
279
+ ser[3] = Timestamp("nat")
280
+
281
+ left_f = getattr(operator, left)
282
+ right_f = getattr(operator, right)
283
+
284
+ # No NaT
285
+ expected = left_f(ser, Timestamp("20010109"))
286
+ result = right_f(Timestamp("20010109"), ser)
287
+ tm.assert_series_equal(result, expected)
288
+
289
+ # NaT
290
+ expected = left_f(ser, Timestamp("nat"))
291
+ result = right_f(Timestamp("nat"), ser)
292
+ tm.assert_series_equal(result, expected)
293
+
294
+ # Compare to Timestamp with series containing NaT
295
+ expected = left_f(s_nat, Timestamp("20010109"))
296
+ result = right_f(Timestamp("20010109"), s_nat)
297
+ tm.assert_series_equal(result, expected)
298
+
299
+ # Compare to NaT with series containing NaT
300
+ expected = left_f(s_nat, NaT)
301
+ result = right_f(NaT, s_nat)
302
+ tm.assert_series_equal(result, expected)
303
+
304
+ def test_dt64arr_timestamp_equality(self, box_with_array):
305
+ # GH#11034
306
+ box = box_with_array
307
+
308
+ ser = Series([Timestamp("2000-01-29 01:59:00"), Timestamp("2000-01-30"), NaT])
309
+ ser = tm.box_expected(ser, box)
310
+ xbox = get_upcast_box(ser, ser, True)
311
+
312
+ result = ser != ser
313
+ expected = tm.box_expected([False, False, True], xbox)
314
+ tm.assert_equal(result, expected)
315
+
316
+ if box is pd.DataFrame:
317
+ # alignment for frame vs series comparisons deprecated
318
+ # in GH#46795 enforced 2.0
319
+ with pytest.raises(ValueError, match="not aligned"):
320
+ ser != ser[0]
321
+
322
+ else:
323
+ result = ser != ser[0]
324
+ expected = tm.box_expected([False, True, True], xbox)
325
+ tm.assert_equal(result, expected)
326
+
327
+ if box is pd.DataFrame:
328
+ # alignment for frame vs series comparisons deprecated
329
+ # in GH#46795 enforced 2.0
330
+ with pytest.raises(ValueError, match="not aligned"):
331
+ ser != ser[2]
332
+ else:
333
+ result = ser != ser[2]
334
+ expected = tm.box_expected([True, True, True], xbox)
335
+ tm.assert_equal(result, expected)
336
+
337
+ result = ser == ser
338
+ expected = tm.box_expected([True, True, False], xbox)
339
+ tm.assert_equal(result, expected)
340
+
341
+ if box is pd.DataFrame:
342
+ # alignment for frame vs series comparisons deprecated
343
+ # in GH#46795 enforced 2.0
344
+ with pytest.raises(ValueError, match="not aligned"):
345
+ ser == ser[0]
346
+ else:
347
+ result = ser == ser[0]
348
+ expected = tm.box_expected([True, False, False], xbox)
349
+ tm.assert_equal(result, expected)
350
+
351
+ if box is pd.DataFrame:
352
+ # alignment for frame vs series comparisons deprecated
353
+ # in GH#46795 enforced 2.0
354
+ with pytest.raises(ValueError, match="not aligned"):
355
+ ser == ser[2]
356
+ else:
357
+ result = ser == ser[2]
358
+ expected = tm.box_expected([False, False, False], xbox)
359
+ tm.assert_equal(result, expected)
360
+
361
+ @pytest.mark.parametrize(
362
+ "datetimelike",
363
+ [
364
+ Timestamp("20130101"),
365
+ datetime(2013, 1, 1),
366
+ np.datetime64("2013-01-01T00:00", "ns"),
367
+ ],
368
+ )
369
+ @pytest.mark.parametrize(
370
+ "op,expected",
371
+ [
372
+ (operator.lt, [True, False, False, False]),
373
+ (operator.le, [True, True, False, False]),
374
+ (operator.eq, [False, True, False, False]),
375
+ (operator.gt, [False, False, False, True]),
376
+ ],
377
+ )
378
+ def test_dt64_compare_datetime_scalar(self, datetimelike, op, expected):
379
+ # GH#17965, test for ability to compare datetime64[ns] columns
380
+ # to datetimelike
381
+ ser = Series(
382
+ [
383
+ Timestamp("20120101"),
384
+ Timestamp("20130101"),
385
+ np.nan,
386
+ Timestamp("20130103"),
387
+ ],
388
+ name="A",
389
+ )
390
+ result = op(ser, datetimelike)
391
+ expected = Series(expected, name="A")
392
+ tm.assert_series_equal(result, expected)
393
+
394
+
395
+ class TestDatetimeIndexComparisons:
396
+ # TODO: moved from tests.indexes.test_base; parametrize and de-duplicate
397
+ def test_comparators(self, comparison_op):
398
+ index = tm.makeDateIndex(100)
399
+ element = index[len(index) // 2]
400
+ element = Timestamp(element).to_datetime64()
401
+
402
+ arr = np.array(index)
403
+ arr_result = comparison_op(arr, element)
404
+ index_result = comparison_op(index, element)
405
+
406
+ assert isinstance(index_result, np.ndarray)
407
+ tm.assert_numpy_array_equal(arr_result, index_result)
408
+
409
+ @pytest.mark.parametrize(
410
+ "other",
411
+ [datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
412
+ )
413
+ def test_dti_cmp_datetimelike(self, other, tz_naive_fixture):
414
+ tz = tz_naive_fixture
415
+ dti = date_range("2016-01-01", periods=2, tz=tz)
416
+ if tz is not None:
417
+ if isinstance(other, np.datetime64):
418
+ # no tzaware version available
419
+ return
420
+ other = localize_pydatetime(other, dti.tzinfo)
421
+
422
+ result = dti == other
423
+ expected = np.array([True, False])
424
+ tm.assert_numpy_array_equal(result, expected)
425
+
426
+ result = dti > other
427
+ expected = np.array([False, True])
428
+ tm.assert_numpy_array_equal(result, expected)
429
+
430
+ result = dti >= other
431
+ expected = np.array([True, True])
432
+ tm.assert_numpy_array_equal(result, expected)
433
+
434
+ result = dti < other
435
+ expected = np.array([False, False])
436
+ tm.assert_numpy_array_equal(result, expected)
437
+
438
+ result = dti <= other
439
+ expected = np.array([True, False])
440
+ tm.assert_numpy_array_equal(result, expected)
441
+
442
+ @pytest.mark.parametrize("dtype", [None, object])
443
+ def test_dti_cmp_nat(self, dtype, box_with_array):
444
+ left = DatetimeIndex([Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")])
445
+ right = DatetimeIndex([NaT, NaT, Timestamp("2011-01-03")])
446
+
447
+ left = tm.box_expected(left, box_with_array)
448
+ right = tm.box_expected(right, box_with_array)
449
+ xbox = get_upcast_box(left, right, True)
450
+
451
+ lhs, rhs = left, right
452
+ if dtype is object:
453
+ lhs, rhs = left.astype(object), right.astype(object)
454
+
455
+ result = rhs == lhs
456
+ expected = np.array([False, False, True])
457
+ expected = tm.box_expected(expected, xbox)
458
+ tm.assert_equal(result, expected)
459
+
460
+ result = lhs != rhs
461
+ expected = np.array([True, True, False])
462
+ expected = tm.box_expected(expected, xbox)
463
+ tm.assert_equal(result, expected)
464
+
465
+ expected = np.array([False, False, False])
466
+ expected = tm.box_expected(expected, xbox)
467
+ tm.assert_equal(lhs == NaT, expected)
468
+ tm.assert_equal(NaT == rhs, expected)
469
+
470
+ expected = np.array([True, True, True])
471
+ expected = tm.box_expected(expected, xbox)
472
+ tm.assert_equal(lhs != NaT, expected)
473
+ tm.assert_equal(NaT != lhs, expected)
474
+
475
+ expected = np.array([False, False, False])
476
+ expected = tm.box_expected(expected, xbox)
477
+ tm.assert_equal(lhs < NaT, expected)
478
+ tm.assert_equal(NaT > lhs, expected)
479
+
480
+ def test_dti_cmp_nat_behaves_like_float_cmp_nan(self):
481
+ fidx1 = pd.Index([1.0, np.nan, 3.0, np.nan, 5.0, 7.0])
482
+ fidx2 = pd.Index([2.0, 3.0, np.nan, np.nan, 6.0, 7.0])
483
+
484
+ didx1 = DatetimeIndex(
485
+ ["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
486
+ )
487
+ didx2 = DatetimeIndex(
488
+ ["2014-02-01", "2014-03-01", NaT, NaT, "2014-06-01", "2014-07-01"]
489
+ )
490
+ darr = np.array(
491
+ [
492
+ np.datetime64("2014-02-01 00:00"),
493
+ np.datetime64("2014-03-01 00:00"),
494
+ np.datetime64("nat"),
495
+ np.datetime64("nat"),
496
+ np.datetime64("2014-06-01 00:00"),
497
+ np.datetime64("2014-07-01 00:00"),
498
+ ]
499
+ )
500
+
501
+ cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]
502
+
503
+ # Check pd.NaT is handles as the same as np.nan
504
+ with tm.assert_produces_warning(None):
505
+ for idx1, idx2 in cases:
506
+ result = idx1 < idx2
507
+ expected = np.array([True, False, False, False, True, False])
508
+ tm.assert_numpy_array_equal(result, expected)
509
+
510
+ result = idx2 > idx1
511
+ expected = np.array([True, False, False, False, True, False])
512
+ tm.assert_numpy_array_equal(result, expected)
513
+
514
+ result = idx1 <= idx2
515
+ expected = np.array([True, False, False, False, True, True])
516
+ tm.assert_numpy_array_equal(result, expected)
517
+
518
+ result = idx2 >= idx1
519
+ expected = np.array([True, False, False, False, True, True])
520
+ tm.assert_numpy_array_equal(result, expected)
521
+
522
+ result = idx1 == idx2
523
+ expected = np.array([False, False, False, False, False, True])
524
+ tm.assert_numpy_array_equal(result, expected)
525
+
526
+ result = idx1 != idx2
527
+ expected = np.array([True, True, True, True, True, False])
528
+ tm.assert_numpy_array_equal(result, expected)
529
+
530
+ with tm.assert_produces_warning(None):
531
+ for idx1, val in [(fidx1, np.nan), (didx1, NaT)]:
532
+ result = idx1 < val
533
+ expected = np.array([False, False, False, False, False, False])
534
+ tm.assert_numpy_array_equal(result, expected)
535
+ result = idx1 > val
536
+ tm.assert_numpy_array_equal(result, expected)
537
+
538
+ result = idx1 <= val
539
+ tm.assert_numpy_array_equal(result, expected)
540
+ result = idx1 >= val
541
+ tm.assert_numpy_array_equal(result, expected)
542
+
543
+ result = idx1 == val
544
+ tm.assert_numpy_array_equal(result, expected)
545
+
546
+ result = idx1 != val
547
+ expected = np.array([True, True, True, True, True, True])
548
+ tm.assert_numpy_array_equal(result, expected)
549
+
550
+ # Check pd.NaT is handles as the same as np.nan
551
+ with tm.assert_produces_warning(None):
552
+ for idx1, val in [(fidx1, 3), (didx1, datetime(2014, 3, 1))]:
553
+ result = idx1 < val
554
+ expected = np.array([True, False, False, False, False, False])
555
+ tm.assert_numpy_array_equal(result, expected)
556
+ result = idx1 > val
557
+ expected = np.array([False, False, False, False, True, True])
558
+ tm.assert_numpy_array_equal(result, expected)
559
+
560
+ result = idx1 <= val
561
+ expected = np.array([True, False, True, False, False, False])
562
+ tm.assert_numpy_array_equal(result, expected)
563
+ result = idx1 >= val
564
+ expected = np.array([False, False, True, False, True, True])
565
+ tm.assert_numpy_array_equal(result, expected)
566
+
567
+ result = idx1 == val
568
+ expected = np.array([False, False, True, False, False, False])
569
+ tm.assert_numpy_array_equal(result, expected)
570
+
571
+ result = idx1 != val
572
+ expected = np.array([True, True, False, True, True, True])
573
+ tm.assert_numpy_array_equal(result, expected)
574
+
575
+ def test_comparison_tzawareness_compat(self, comparison_op, box_with_array):
576
+ # GH#18162
577
+ op = comparison_op
578
+ box = box_with_array
579
+
580
+ dr = date_range("2016-01-01", periods=6)
581
+ dz = dr.tz_localize("US/Pacific")
582
+
583
+ dr = tm.box_expected(dr, box)
584
+ dz = tm.box_expected(dz, box)
585
+
586
+ if box is pd.DataFrame:
587
+ tolist = lambda x: x.astype(object).values.tolist()[0]
588
+ else:
589
+ tolist = list
590
+
591
+ if op not in [operator.eq, operator.ne]:
592
+ msg = (
593
+ r"Invalid comparison between dtype=datetime64\[ns.*\] "
594
+ "and (Timestamp|DatetimeArray|list|ndarray)"
595
+ )
596
+ with pytest.raises(TypeError, match=msg):
597
+ op(dr, dz)
598
+
599
+ with pytest.raises(TypeError, match=msg):
600
+ op(dr, tolist(dz))
601
+ with pytest.raises(TypeError, match=msg):
602
+ op(dr, np.array(tolist(dz), dtype=object))
603
+ with pytest.raises(TypeError, match=msg):
604
+ op(dz, dr)
605
+
606
+ with pytest.raises(TypeError, match=msg):
607
+ op(dz, tolist(dr))
608
+ with pytest.raises(TypeError, match=msg):
609
+ op(dz, np.array(tolist(dr), dtype=object))
610
+
611
+ # The aware==aware and naive==naive comparisons should *not* raise
612
+ assert np.all(dr == dr)
613
+ assert np.all(dr == tolist(dr))
614
+ assert np.all(tolist(dr) == dr)
615
+ assert np.all(np.array(tolist(dr), dtype=object) == dr)
616
+ assert np.all(dr == np.array(tolist(dr), dtype=object))
617
+
618
+ assert np.all(dz == dz)
619
+ assert np.all(dz == tolist(dz))
620
+ assert np.all(tolist(dz) == dz)
621
+ assert np.all(np.array(tolist(dz), dtype=object) == dz)
622
+ assert np.all(dz == np.array(tolist(dz), dtype=object))
623
+
624
+ def test_comparison_tzawareness_compat_scalars(self, comparison_op, box_with_array):
625
+ # GH#18162
626
+ op = comparison_op
627
+
628
+ dr = date_range("2016-01-01", periods=6)
629
+ dz = dr.tz_localize("US/Pacific")
630
+
631
+ dr = tm.box_expected(dr, box_with_array)
632
+ dz = tm.box_expected(dz, box_with_array)
633
+
634
+ # Check comparisons against scalar Timestamps
635
+ ts = Timestamp("2000-03-14 01:59")
636
+ ts_tz = Timestamp("2000-03-14 01:59", tz="Europe/Amsterdam")
637
+
638
+ assert np.all(dr > ts)
639
+ msg = r"Invalid comparison between dtype=datetime64\[ns.*\] and Timestamp"
640
+ if op not in [operator.eq, operator.ne]:
641
+ with pytest.raises(TypeError, match=msg):
642
+ op(dr, ts_tz)
643
+
644
+ assert np.all(dz > ts_tz)
645
+ if op not in [operator.eq, operator.ne]:
646
+ with pytest.raises(TypeError, match=msg):
647
+ op(dz, ts)
648
+
649
+ if op not in [operator.eq, operator.ne]:
650
+ # GH#12601: Check comparison against Timestamps and DatetimeIndex
651
+ with pytest.raises(TypeError, match=msg):
652
+ op(ts, dz)
653
+
654
+ @pytest.mark.parametrize(
655
+ "other",
656
+ [datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
657
+ )
658
+ # Bug in NumPy? https://github.com/numpy/numpy/issues/13841
659
+ # Raising in __eq__ will fallback to NumPy, which warns, fails,
660
+ # then re-raises the original exception. So we just need to ignore.
661
+ @pytest.mark.filterwarnings("ignore:elementwise comp:DeprecationWarning")
662
+ def test_scalar_comparison_tzawareness(
663
+ self, comparison_op, other, tz_aware_fixture, box_with_array
664
+ ):
665
+ op = comparison_op
666
+ tz = tz_aware_fixture
667
+ dti = date_range("2016-01-01", periods=2, tz=tz)
668
+
669
+ dtarr = tm.box_expected(dti, box_with_array)
670
+ xbox = get_upcast_box(dtarr, other, True)
671
+ if op in [operator.eq, operator.ne]:
672
+ exbool = op is operator.ne
673
+ expected = np.array([exbool, exbool], dtype=bool)
674
+ expected = tm.box_expected(expected, xbox)
675
+
676
+ result = op(dtarr, other)
677
+ tm.assert_equal(result, expected)
678
+
679
+ result = op(other, dtarr)
680
+ tm.assert_equal(result, expected)
681
+ else:
682
+ msg = (
683
+ r"Invalid comparison between dtype=datetime64\[ns, .*\] "
684
+ f"and {type(other).__name__}"
685
+ )
686
+ with pytest.raises(TypeError, match=msg):
687
+ op(dtarr, other)
688
+ with pytest.raises(TypeError, match=msg):
689
+ op(other, dtarr)
690
+
691
+ def test_nat_comparison_tzawareness(self, comparison_op):
692
+ # GH#19276
693
+ # tzaware DatetimeIndex should not raise when compared to NaT
694
+ op = comparison_op
695
+
696
+ dti = DatetimeIndex(
697
+ ["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
698
+ )
699
+ expected = np.array([op == operator.ne] * len(dti))
700
+ result = op(dti, NaT)
701
+ tm.assert_numpy_array_equal(result, expected)
702
+
703
+ result = op(dti.tz_localize("US/Pacific"), NaT)
704
+ tm.assert_numpy_array_equal(result, expected)
705
+
706
+ def test_dti_cmp_str(self, tz_naive_fixture):
707
+ # GH#22074
708
+ # regardless of tz, we expect these comparisons are valid
709
+ tz = tz_naive_fixture
710
+ rng = date_range("1/1/2000", periods=10, tz=tz)
711
+ other = "1/1/2000"
712
+
713
+ result = rng == other
714
+ expected = np.array([True] + [False] * 9)
715
+ tm.assert_numpy_array_equal(result, expected)
716
+
717
+ result = rng != other
718
+ expected = np.array([False] + [True] * 9)
719
+ tm.assert_numpy_array_equal(result, expected)
720
+
721
+ result = rng < other
722
+ expected = np.array([False] * 10)
723
+ tm.assert_numpy_array_equal(result, expected)
724
+
725
+ result = rng <= other
726
+ expected = np.array([True] + [False] * 9)
727
+ tm.assert_numpy_array_equal(result, expected)
728
+
729
+ result = rng > other
730
+ expected = np.array([False] + [True] * 9)
731
+ tm.assert_numpy_array_equal(result, expected)
732
+
733
+ result = rng >= other
734
+ expected = np.array([True] * 10)
735
+ tm.assert_numpy_array_equal(result, expected)
736
+
737
+ def test_dti_cmp_list(self):
738
+ rng = date_range("1/1/2000", periods=10)
739
+
740
+ result = rng == list(rng)
741
+ expected = rng == rng
742
+ tm.assert_numpy_array_equal(result, expected)
743
+
744
+ @pytest.mark.parametrize(
745
+ "other",
746
+ [
747
+ pd.timedelta_range("1D", periods=10),
748
+ pd.timedelta_range("1D", periods=10).to_series(),
749
+ pd.timedelta_range("1D", periods=10).asi8.view("m8[ns]"),
750
+ ],
751
+ ids=lambda x: type(x).__name__,
752
+ )
753
+ def test_dti_cmp_tdi_tzawareness(self, other):
754
+ # GH#22074
755
+ # reversion test that we _don't_ call _assert_tzawareness_compat
756
+ # when comparing against TimedeltaIndex
757
+ dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
758
+
759
+ result = dti == other
760
+ expected = np.array([False] * 10)
761
+ tm.assert_numpy_array_equal(result, expected)
762
+
763
+ result = dti != other
764
+ expected = np.array([True] * 10)
765
+ tm.assert_numpy_array_equal(result, expected)
766
+ msg = "Invalid comparison between"
767
+ with pytest.raises(TypeError, match=msg):
768
+ dti < other
769
+ with pytest.raises(TypeError, match=msg):
770
+ dti <= other
771
+ with pytest.raises(TypeError, match=msg):
772
+ dti > other
773
+ with pytest.raises(TypeError, match=msg):
774
+ dti >= other
775
+
776
+ def test_dti_cmp_object_dtype(self):
777
+ # GH#22074
778
+ dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
779
+
780
+ other = dti.astype("O")
781
+
782
+ result = dti == other
783
+ expected = np.array([True] * 10)
784
+ tm.assert_numpy_array_equal(result, expected)
785
+
786
+ other = dti.tz_localize(None)
787
+ result = dti != other
788
+ tm.assert_numpy_array_equal(result, expected)
789
+
790
+ other = np.array(list(dti[:5]) + [Timedelta(days=1)] * 5)
791
+ result = dti == other
792
+ expected = np.array([True] * 5 + [False] * 5)
793
+ tm.assert_numpy_array_equal(result, expected)
794
+ msg = ">=' not supported between instances of 'Timestamp' and 'Timedelta'"
795
+ with pytest.raises(TypeError, match=msg):
796
+ dti >= other
797
+
798
+
799
+ # ------------------------------------------------------------------
800
+ # Arithmetic
801
+
802
+
803
+ class TestDatetime64Arithmetic:
804
+ # This class is intended for "finished" tests that are fully parametrized
805
+ # over DataFrame/Series/Index/DatetimeArray
806
+
807
+ # -------------------------------------------------------------
808
+ # Addition/Subtraction of timedelta-like
809
+
810
+ @pytest.mark.arm_slow
811
+ def test_dt64arr_add_timedeltalike_scalar(
812
+ self, tz_naive_fixture, two_hours, box_with_array
813
+ ):
814
+ # GH#22005, GH#22163 check DataFrame doesn't raise TypeError
815
+ tz = tz_naive_fixture
816
+
817
+ rng = date_range("2000-01-01", "2000-02-01", tz=tz)
818
+ expected = date_range("2000-01-01 02:00", "2000-02-01 02:00", tz=tz)
819
+
820
+ rng = tm.box_expected(rng, box_with_array)
821
+ expected = tm.box_expected(expected, box_with_array)
822
+
823
+ result = rng + two_hours
824
+ tm.assert_equal(result, expected)
825
+
826
+ result = two_hours + rng
827
+ tm.assert_equal(result, expected)
828
+
829
+ rng += two_hours
830
+ tm.assert_equal(rng, expected)
831
+
832
+ def test_dt64arr_sub_timedeltalike_scalar(
833
+ self, tz_naive_fixture, two_hours, box_with_array
834
+ ):
835
+ tz = tz_naive_fixture
836
+
837
+ rng = date_range("2000-01-01", "2000-02-01", tz=tz)
838
+ expected = date_range("1999-12-31 22:00", "2000-01-31 22:00", tz=tz)
839
+
840
+ rng = tm.box_expected(rng, box_with_array)
841
+ expected = tm.box_expected(expected, box_with_array)
842
+
843
+ result = rng - two_hours
844
+ tm.assert_equal(result, expected)
845
+
846
+ rng -= two_hours
847
+ tm.assert_equal(rng, expected)
848
+
849
+ def test_dt64_array_sub_dt_with_different_timezone(self, box_with_array):
850
+ t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
851
+ t1 = tm.box_expected(t1, box_with_array)
852
+ t2 = Timestamp("20130101").tz_localize("CET")
853
+ tnaive = Timestamp(20130101)
854
+
855
+ result = t1 - t2
856
+ expected = TimedeltaIndex(
857
+ ["0 days 06:00:00", "1 days 06:00:00", "2 days 06:00:00"]
858
+ )
859
+ expected = tm.box_expected(expected, box_with_array)
860
+ tm.assert_equal(result, expected)
861
+
862
+ result = t2 - t1
863
+ expected = TimedeltaIndex(
864
+ ["-1 days +18:00:00", "-2 days +18:00:00", "-3 days +18:00:00"]
865
+ )
866
+ expected = tm.box_expected(expected, box_with_array)
867
+ tm.assert_equal(result, expected)
868
+
869
+ msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
870
+ with pytest.raises(TypeError, match=msg):
871
+ t1 - tnaive
872
+
873
+ with pytest.raises(TypeError, match=msg):
874
+ tnaive - t1
875
+
876
+ def test_dt64_array_sub_dt64_array_with_different_timezone(self, box_with_array):
877
+ t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
878
+ t1 = tm.box_expected(t1, box_with_array)
879
+ t2 = date_range("20130101", periods=3).tz_localize("CET")
880
+ t2 = tm.box_expected(t2, box_with_array)
881
+ tnaive = date_range("20130101", periods=3)
882
+
883
+ result = t1 - t2
884
+ expected = TimedeltaIndex(
885
+ ["0 days 06:00:00", "0 days 06:00:00", "0 days 06:00:00"]
886
+ )
887
+ expected = tm.box_expected(expected, box_with_array)
888
+ tm.assert_equal(result, expected)
889
+
890
+ result = t2 - t1
891
+ expected = TimedeltaIndex(
892
+ ["-1 days +18:00:00", "-1 days +18:00:00", "-1 days +18:00:00"]
893
+ )
894
+ expected = tm.box_expected(expected, box_with_array)
895
+ tm.assert_equal(result, expected)
896
+
897
+ msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
898
+ with pytest.raises(TypeError, match=msg):
899
+ t1 - tnaive
900
+
901
+ with pytest.raises(TypeError, match=msg):
902
+ tnaive - t1
903
+
904
+ def test_dt64arr_add_sub_td64_nat(self, box_with_array, tz_naive_fixture):
905
+ # GH#23320 special handling for timedelta64("NaT")
906
+ tz = tz_naive_fixture
907
+
908
+ dti = date_range("1994-04-01", periods=9, tz=tz, freq="QS")
909
+ other = np.timedelta64("NaT")
910
+ expected = DatetimeIndex(["NaT"] * 9, tz=tz)
911
+
912
+ obj = tm.box_expected(dti, box_with_array)
913
+ expected = tm.box_expected(expected, box_with_array)
914
+
915
+ result = obj + other
916
+ tm.assert_equal(result, expected)
917
+ result = other + obj
918
+ tm.assert_equal(result, expected)
919
+ result = obj - other
920
+ tm.assert_equal(result, expected)
921
+ msg = "cannot subtract"
922
+ with pytest.raises(TypeError, match=msg):
923
+ other - obj
924
+
925
+ def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array):
926
+ tz = tz_naive_fixture
927
+ dti = date_range("2016-01-01", periods=3, tz=tz)
928
+ tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"])
929
+ tdarr = tdi.values
930
+
931
+ expected = date_range("2015-12-31", "2016-01-02", periods=3, tz=tz)
932
+
933
+ dtarr = tm.box_expected(dti, box_with_array)
934
+ expected = tm.box_expected(expected, box_with_array)
935
+
936
+ result = dtarr + tdarr
937
+ tm.assert_equal(result, expected)
938
+ result = tdarr + dtarr
939
+ tm.assert_equal(result, expected)
940
+
941
+ expected = date_range("2016-01-02", "2016-01-04", periods=3, tz=tz)
942
+ expected = tm.box_expected(expected, box_with_array)
943
+
944
+ result = dtarr - tdarr
945
+ tm.assert_equal(result, expected)
946
+ msg = "cannot subtract|(bad|unsupported) operand type for unary"
947
+ with pytest.raises(TypeError, match=msg):
948
+ tdarr - dtarr
949
+
950
+ # -----------------------------------------------------------------
951
+ # Subtraction of datetime-like scalars
952
+
953
+ @pytest.mark.parametrize(
954
+ "ts",
955
+ [
956
+ Timestamp("2013-01-01"),
957
+ Timestamp("2013-01-01").to_pydatetime(),
958
+ Timestamp("2013-01-01").to_datetime64(),
959
+ # GH#7996, GH#22163 ensure non-nano datetime64 is converted to nano
960
+ # for DataFrame operation
961
+ np.datetime64("2013-01-01", "D"),
962
+ ],
963
+ )
964
+ def test_dt64arr_sub_dtscalar(self, box_with_array, ts):
965
+ # GH#8554, GH#22163 DataFrame op should _not_ return dt64 dtype
966
+ idx = date_range("2013-01-01", periods=3)._with_freq(None)
967
+ idx = tm.box_expected(idx, box_with_array)
968
+
969
+ expected = TimedeltaIndex(["0 Days", "1 Day", "2 Days"])
970
+ expected = tm.box_expected(expected, box_with_array)
971
+
972
+ result = idx - ts
973
+ tm.assert_equal(result, expected)
974
+
975
+ result = ts - idx
976
+ tm.assert_equal(result, -expected)
977
+ tm.assert_equal(result, -expected)
978
+
979
+ def test_dt64arr_sub_timestamp_tzaware(self, box_with_array):
980
+ ser = date_range("2014-03-17", periods=2, freq="D", tz="US/Eastern")
981
+ ser = ser._with_freq(None)
982
+ ts = ser[0]
983
+
984
+ ser = tm.box_expected(ser, box_with_array)
985
+
986
+ delta_series = Series([np.timedelta64(0, "D"), np.timedelta64(1, "D")])
987
+ expected = tm.box_expected(delta_series, box_with_array)
988
+
989
+ tm.assert_equal(ser - ts, expected)
990
+ tm.assert_equal(ts - ser, -expected)
991
+
992
+ def test_dt64arr_sub_NaT(self, box_with_array):
993
+ # GH#18808
994
+ dti = DatetimeIndex([NaT, Timestamp("19900315")])
995
+ ser = tm.box_expected(dti, box_with_array)
996
+
997
+ result = ser - NaT
998
+ expected = Series([NaT, NaT], dtype="timedelta64[ns]")
999
+ expected = tm.box_expected(expected, box_with_array)
1000
+ tm.assert_equal(result, expected)
1001
+
1002
+ dti_tz = dti.tz_localize("Asia/Tokyo")
1003
+ ser_tz = tm.box_expected(dti_tz, box_with_array)
1004
+
1005
+ result = ser_tz - NaT
1006
+ expected = Series([NaT, NaT], dtype="timedelta64[ns]")
1007
+ expected = tm.box_expected(expected, box_with_array)
1008
+ tm.assert_equal(result, expected)
1009
+
1010
+ # -------------------------------------------------------------
1011
+ # Subtraction of datetime-like array-like
1012
+
1013
+ def test_dt64arr_sub_dt64object_array(self, box_with_array, tz_naive_fixture):
1014
+ dti = date_range("2016-01-01", periods=3, tz=tz_naive_fixture)
1015
+ expected = dti - dti
1016
+
1017
+ obj = tm.box_expected(dti, box_with_array)
1018
+ expected = tm.box_expected(expected, box_with_array).astype(object)
1019
+
1020
+ with tm.assert_produces_warning(PerformanceWarning):
1021
+ result = obj - obj.astype(object)
1022
+ tm.assert_equal(result, expected)
1023
+
1024
+ def test_dt64arr_naive_sub_dt64ndarray(self, box_with_array):
1025
+ dti = date_range("2016-01-01", periods=3, tz=None)
1026
+ dt64vals = dti.values
1027
+
1028
+ dtarr = tm.box_expected(dti, box_with_array)
1029
+
1030
+ expected = dtarr - dtarr
1031
+ result = dtarr - dt64vals
1032
+ tm.assert_equal(result, expected)
1033
+ result = dt64vals - dtarr
1034
+ tm.assert_equal(result, expected)
1035
+
1036
+ def test_dt64arr_aware_sub_dt64ndarray_raises(
1037
+ self, tz_aware_fixture, box_with_array
1038
+ ):
1039
+ tz = tz_aware_fixture
1040
+ dti = date_range("2016-01-01", periods=3, tz=tz)
1041
+ dt64vals = dti.values
1042
+
1043
+ dtarr = tm.box_expected(dti, box_with_array)
1044
+ msg = "Cannot subtract tz-naive and tz-aware datetime"
1045
+ with pytest.raises(TypeError, match=msg):
1046
+ dtarr - dt64vals
1047
+ with pytest.raises(TypeError, match=msg):
1048
+ dt64vals - dtarr
1049
+
1050
+ # -------------------------------------------------------------
1051
+ # Addition of datetime-like others (invalid)
1052
+
1053
+ def test_dt64arr_add_dtlike_raises(self, tz_naive_fixture, box_with_array):
1054
+ # GH#22163 ensure DataFrame doesn't cast Timestamp to i8
1055
+ # GH#9631
1056
+ tz = tz_naive_fixture
1057
+
1058
+ dti = date_range("2016-01-01", periods=3, tz=tz)
1059
+ if tz is None:
1060
+ dti2 = dti.tz_localize("US/Eastern")
1061
+ else:
1062
+ dti2 = dti.tz_localize(None)
1063
+ dtarr = tm.box_expected(dti, box_with_array)
1064
+
1065
+ assert_cannot_add(dtarr, dti.values)
1066
+ assert_cannot_add(dtarr, dti)
1067
+ assert_cannot_add(dtarr, dtarr)
1068
+ assert_cannot_add(dtarr, dti[0])
1069
+ assert_cannot_add(dtarr, dti[0].to_pydatetime())
1070
+ assert_cannot_add(dtarr, dti[0].to_datetime64())
1071
+ assert_cannot_add(dtarr, dti2[0])
1072
+ assert_cannot_add(dtarr, dti2[0].to_pydatetime())
1073
+ assert_cannot_add(dtarr, np.datetime64("2011-01-01", "D"))
1074
+
1075
+ # -------------------------------------------------------------
1076
+ # Other Invalid Addition/Subtraction
1077
+
1078
+ # Note: freq here includes both Tick and non-Tick offsets; this is
1079
+ # relevant because historically integer-addition was allowed if we had
1080
+ # a freq.
1081
+ @pytest.mark.parametrize("freq", ["H", "D", "W", "M", "MS", "Q", "B", None])
1082
+ @pytest.mark.parametrize("dtype", [None, "uint8"])
1083
+ def test_dt64arr_addsub_intlike(
1084
+ self, dtype, box_with_array, freq, tz_naive_fixture
1085
+ ):
1086
+ # GH#19959, GH#19123, GH#19012
1087
+ tz = tz_naive_fixture
1088
+ if box_with_array is pd.DataFrame:
1089
+ # alignment headaches
1090
+ return
1091
+
1092
+ if freq is None:
1093
+ dti = DatetimeIndex(["NaT", "2017-04-05 06:07:08"], tz=tz)
1094
+ else:
1095
+ dti = date_range("2016-01-01", periods=2, freq=freq, tz=tz)
1096
+
1097
+ obj = box_with_array(dti)
1098
+ other = np.array([4, -1])
1099
+ if dtype is not None:
1100
+ other = other.astype(dtype)
1101
+
1102
+ msg = "|".join(
1103
+ [
1104
+ "Addition/subtraction of integers",
1105
+ "cannot subtract DatetimeArray from",
1106
+ # IntegerArray
1107
+ "can only perform ops with numeric values",
1108
+ "unsupported operand type.*Categorical",
1109
+ r"unsupported operand type\(s\) for -: 'int' and 'Timestamp'",
1110
+ ]
1111
+ )
1112
+ assert_invalid_addsub_type(obj, 1, msg)
1113
+ assert_invalid_addsub_type(obj, np.int64(2), msg)
1114
+ assert_invalid_addsub_type(obj, np.array(3, dtype=np.int64), msg)
1115
+ assert_invalid_addsub_type(obj, other, msg)
1116
+ assert_invalid_addsub_type(obj, np.array(other), msg)
1117
+ assert_invalid_addsub_type(obj, pd.array(other), msg)
1118
+ assert_invalid_addsub_type(obj, pd.Categorical(other), msg)
1119
+ assert_invalid_addsub_type(obj, pd.Index(other), msg)
1120
+ assert_invalid_addsub_type(obj, Series(other), msg)
1121
+
1122
+ @pytest.mark.parametrize(
1123
+ "other",
1124
+ [
1125
+ 3.14,
1126
+ np.array([2.0, 3.0]),
1127
+ # GH#13078 datetime +/- Period is invalid
1128
+ Period("2011-01-01", freq="D"),
1129
+ # https://github.com/pandas-dev/pandas/issues/10329
1130
+ time(1, 2, 3),
1131
+ ],
1132
+ )
1133
+ @pytest.mark.parametrize("dti_freq", [None, "D"])
1134
+ def test_dt64arr_add_sub_invalid(self, dti_freq, other, box_with_array):
1135
+ dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
1136
+ dtarr = tm.box_expected(dti, box_with_array)
1137
+ msg = "|".join(
1138
+ [
1139
+ "unsupported operand type",
1140
+ "cannot (add|subtract)",
1141
+ "cannot use operands with types",
1142
+ "ufunc '?(add|subtract)'? cannot use operands with types",
1143
+ "Concatenation operation is not implemented for NumPy arrays",
1144
+ ]
1145
+ )
1146
+ assert_invalid_addsub_type(dtarr, other, msg)
1147
+
1148
+ @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "H"])
1149
+ @pytest.mark.parametrize("dti_freq", [None, "D"])
1150
+ def test_dt64arr_add_sub_parr(
1151
+ self, dti_freq, pi_freq, box_with_array, box_with_array2
1152
+ ):
1153
+ # GH#20049 subtracting PeriodIndex should raise TypeError
1154
+ dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
1155
+ pi = dti.to_period(pi_freq)
1156
+
1157
+ dtarr = tm.box_expected(dti, box_with_array)
1158
+ parr = tm.box_expected(pi, box_with_array2)
1159
+ msg = "|".join(
1160
+ [
1161
+ "cannot (add|subtract)",
1162
+ "unsupported operand",
1163
+ "descriptor.*requires",
1164
+ "ufunc.*cannot use operands",
1165
+ ]
1166
+ )
1167
+ assert_invalid_addsub_type(dtarr, parr, msg)
1168
+
1169
+ def test_dt64arr_addsub_time_objects_raises(self, box_with_array, tz_naive_fixture):
1170
+ # https://github.com/pandas-dev/pandas/issues/10329
1171
+
1172
+ tz = tz_naive_fixture
1173
+
1174
+ obj1 = date_range("2012-01-01", periods=3, tz=tz)
1175
+ obj2 = [time(i, i, i) for i in range(3)]
1176
+
1177
+ obj1 = tm.box_expected(obj1, box_with_array)
1178
+ obj2 = tm.box_expected(obj2, box_with_array)
1179
+
1180
+ msg = "|".join(
1181
+ [
1182
+ "unsupported operand",
1183
+ "cannot subtract DatetimeArray from ndarray",
1184
+ ]
1185
+ )
1186
+
1187
+ with warnings.catch_warnings(record=True):
1188
+ # pandas.errors.PerformanceWarning: Non-vectorized DateOffset being
1189
+ # applied to Series or DatetimeIndex
1190
+ # we aren't testing that here, so ignore.
1191
+ warnings.simplefilter("ignore", PerformanceWarning)
1192
+
1193
+ assert_invalid_addsub_type(obj1, obj2, msg=msg)
1194
+
1195
+ # -------------------------------------------------------------
1196
+ # Other invalid operations
1197
+
1198
+ @pytest.mark.parametrize(
1199
+ "dt64_series",
1200
+ [
1201
+ Series([Timestamp("19900315"), Timestamp("19900315")]),
1202
+ Series([NaT, Timestamp("19900315")]),
1203
+ Series([NaT, NaT], dtype="datetime64[ns]"),
1204
+ ],
1205
+ )
1206
+ @pytest.mark.parametrize("one", [1, 1.0, np.array(1)])
1207
+ def test_dt64_mul_div_numeric_invalid(self, one, dt64_series, box_with_array):
1208
+ obj = tm.box_expected(dt64_series, box_with_array)
1209
+
1210
+ msg = "cannot perform .* with this index type"
1211
+
1212
+ # multiplication
1213
+ with pytest.raises(TypeError, match=msg):
1214
+ obj * one
1215
+ with pytest.raises(TypeError, match=msg):
1216
+ one * obj
1217
+
1218
+ # division
1219
+ with pytest.raises(TypeError, match=msg):
1220
+ obj / one
1221
+ with pytest.raises(TypeError, match=msg):
1222
+ one / obj
1223
+
1224
+
1225
+ class TestDatetime64DateOffsetArithmetic:
1226
+ # -------------------------------------------------------------
1227
+ # Tick DateOffsets
1228
+
1229
+ # TODO: parametrize over timezone?
1230
+ def test_dt64arr_series_add_tick_DateOffset(self, box_with_array):
1231
+ # GH#4532
1232
+ # operate with pd.offsets
1233
+ ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
1234
+ expected = Series(
1235
+ [Timestamp("20130101 9:01:05"), Timestamp("20130101 9:02:05")]
1236
+ )
1237
+
1238
+ ser = tm.box_expected(ser, box_with_array)
1239
+ expected = tm.box_expected(expected, box_with_array)
1240
+
1241
+ result = ser + pd.offsets.Second(5)
1242
+ tm.assert_equal(result, expected)
1243
+
1244
+ result2 = pd.offsets.Second(5) + ser
1245
+ tm.assert_equal(result2, expected)
1246
+
1247
+ def test_dt64arr_series_sub_tick_DateOffset(self, box_with_array):
1248
+ # GH#4532
1249
+ # operate with pd.offsets
1250
+ ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
1251
+ expected = Series(
1252
+ [Timestamp("20130101 9:00:55"), Timestamp("20130101 9:01:55")]
1253
+ )
1254
+
1255
+ ser = tm.box_expected(ser, box_with_array)
1256
+ expected = tm.box_expected(expected, box_with_array)
1257
+
1258
+ result = ser - pd.offsets.Second(5)
1259
+ tm.assert_equal(result, expected)
1260
+
1261
+ result2 = -pd.offsets.Second(5) + ser
1262
+ tm.assert_equal(result2, expected)
1263
+ msg = "(bad|unsupported) operand type for unary"
1264
+ with pytest.raises(TypeError, match=msg):
1265
+ pd.offsets.Second(5) - ser
1266
+
1267
+ @pytest.mark.parametrize(
1268
+ "cls_name", ["Day", "Hour", "Minute", "Second", "Milli", "Micro", "Nano"]
1269
+ )
1270
+ def test_dt64arr_add_sub_tick_DateOffset_smoke(self, cls_name, box_with_array):
1271
+ # GH#4532
1272
+ # smoke tests for valid DateOffsets
1273
+ ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
1274
+ ser = tm.box_expected(ser, box_with_array)
1275
+
1276
+ offset_cls = getattr(pd.offsets, cls_name)
1277
+ ser + offset_cls(5)
1278
+ offset_cls(5) + ser
1279
+ ser - offset_cls(5)
1280
+
1281
+ def test_dti_add_tick_tzaware(self, tz_aware_fixture, box_with_array):
1282
+ # GH#21610, GH#22163 ensure DataFrame doesn't return object-dtype
1283
+ tz = tz_aware_fixture
1284
+ if tz == "US/Pacific":
1285
+ dates = date_range("2012-11-01", periods=3, tz=tz)
1286
+ offset = dates + pd.offsets.Hour(5)
1287
+ assert dates[0] + pd.offsets.Hour(5) == offset[0]
1288
+
1289
+ dates = date_range("2010-11-01 00:00", periods=3, tz=tz, freq="H")
1290
+ expected = DatetimeIndex(
1291
+ ["2010-11-01 05:00", "2010-11-01 06:00", "2010-11-01 07:00"],
1292
+ freq="H",
1293
+ tz=tz,
1294
+ )
1295
+
1296
+ dates = tm.box_expected(dates, box_with_array)
1297
+ expected = tm.box_expected(expected, box_with_array)
1298
+
1299
+ for scalar in [pd.offsets.Hour(5), np.timedelta64(5, "h"), timedelta(hours=5)]:
1300
+ offset = dates + scalar
1301
+ tm.assert_equal(offset, expected)
1302
+ offset = scalar + dates
1303
+ tm.assert_equal(offset, expected)
1304
+
1305
+ roundtrip = offset - scalar
1306
+ tm.assert_equal(roundtrip, dates)
1307
+
1308
+ msg = "|".join(
1309
+ ["bad operand type for unary -", "cannot subtract DatetimeArray"]
1310
+ )
1311
+ with pytest.raises(TypeError, match=msg):
1312
+ scalar - dates
1313
+
1314
+ # -------------------------------------------------------------
1315
+ # RelativeDelta DateOffsets
1316
+
1317
+ def test_dt64arr_add_sub_relativedelta_offsets(self, box_with_array):
1318
+ # GH#10699
1319
+ vec = DatetimeIndex(
1320
+ [
1321
+ Timestamp("2000-01-05 00:15:00"),
1322
+ Timestamp("2000-01-31 00:23:00"),
1323
+ Timestamp("2000-01-01"),
1324
+ Timestamp("2000-03-31"),
1325
+ Timestamp("2000-02-29"),
1326
+ Timestamp("2000-12-31"),
1327
+ Timestamp("2000-05-15"),
1328
+ Timestamp("2001-06-15"),
1329
+ ]
1330
+ )
1331
+ vec = tm.box_expected(vec, box_with_array)
1332
+ vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec
1333
+
1334
+ # DateOffset relativedelta fastpath
1335
+ relative_kwargs = [
1336
+ ("years", 2),
1337
+ ("months", 5),
1338
+ ("days", 3),
1339
+ ("hours", 5),
1340
+ ("minutes", 10),
1341
+ ("seconds", 2),
1342
+ ("microseconds", 5),
1343
+ ]
1344
+ for i, (unit, value) in enumerate(relative_kwargs):
1345
+ off = DateOffset(**{unit: value})
1346
+
1347
+ expected = DatetimeIndex([x + off for x in vec_items])
1348
+ expected = tm.box_expected(expected, box_with_array)
1349
+ tm.assert_equal(expected, vec + off)
1350
+
1351
+ expected = DatetimeIndex([x - off for x in vec_items])
1352
+ expected = tm.box_expected(expected, box_with_array)
1353
+ tm.assert_equal(expected, vec - off)
1354
+
1355
+ off = DateOffset(**dict(relative_kwargs[: i + 1]))
1356
+
1357
+ expected = DatetimeIndex([x + off for x in vec_items])
1358
+ expected = tm.box_expected(expected, box_with_array)
1359
+ tm.assert_equal(expected, vec + off)
1360
+
1361
+ expected = DatetimeIndex([x - off for x in vec_items])
1362
+ expected = tm.box_expected(expected, box_with_array)
1363
+ tm.assert_equal(expected, vec - off)
1364
+ msg = "(bad|unsupported) operand type for unary"
1365
+ with pytest.raises(TypeError, match=msg):
1366
+ off - vec
1367
+
1368
+ # -------------------------------------------------------------
1369
+ # Non-Tick, Non-RelativeDelta DateOffsets
1370
+
1371
+ # TODO: redundant with test_dt64arr_add_sub_DateOffset? that includes
1372
+ # tz-aware cases which this does not
1373
+ @pytest.mark.parametrize(
1374
+ "cls_and_kwargs",
1375
+ [
1376
+ "YearBegin",
1377
+ ("YearBegin", {"month": 5}),
1378
+ "YearEnd",
1379
+ ("YearEnd", {"month": 5}),
1380
+ "MonthBegin",
1381
+ "MonthEnd",
1382
+ "SemiMonthEnd",
1383
+ "SemiMonthBegin",
1384
+ "Week",
1385
+ ("Week", {"weekday": 3}),
1386
+ "Week",
1387
+ ("Week", {"weekday": 6}),
1388
+ "BusinessDay",
1389
+ "BDay",
1390
+ "QuarterEnd",
1391
+ "QuarterBegin",
1392
+ "CustomBusinessDay",
1393
+ "CDay",
1394
+ "CBMonthEnd",
1395
+ "CBMonthBegin",
1396
+ "BMonthBegin",
1397
+ "BMonthEnd",
1398
+ "BusinessHour",
1399
+ "BYearBegin",
1400
+ "BYearEnd",
1401
+ "BQuarterBegin",
1402
+ ("LastWeekOfMonth", {"weekday": 2}),
1403
+ (
1404
+ "FY5253Quarter",
1405
+ {
1406
+ "qtr_with_extra_week": 1,
1407
+ "startingMonth": 1,
1408
+ "weekday": 2,
1409
+ "variation": "nearest",
1410
+ },
1411
+ ),
1412
+ ("FY5253", {"weekday": 0, "startingMonth": 2, "variation": "nearest"}),
1413
+ ("WeekOfMonth", {"weekday": 2, "week": 2}),
1414
+ "Easter",
1415
+ ("DateOffset", {"day": 4}),
1416
+ ("DateOffset", {"month": 5}),
1417
+ ],
1418
+ )
1419
+ @pytest.mark.parametrize("normalize", [True, False])
1420
+ @pytest.mark.parametrize("n", [0, 5])
1421
+ def test_dt64arr_add_sub_DateOffsets(
1422
+ self, box_with_array, n, normalize, cls_and_kwargs
1423
+ ):
1424
+ # GH#10699
1425
+ # assert vectorized operation matches pointwise operations
1426
+
1427
+ if isinstance(cls_and_kwargs, tuple):
1428
+ # If cls_name param is a tuple, then 2nd entry is kwargs for
1429
+ # the offset constructor
1430
+ cls_name, kwargs = cls_and_kwargs
1431
+ else:
1432
+ cls_name = cls_and_kwargs
1433
+ kwargs = {}
1434
+
1435
+ if n == 0 and cls_name in [
1436
+ "WeekOfMonth",
1437
+ "LastWeekOfMonth",
1438
+ "FY5253Quarter",
1439
+ "FY5253",
1440
+ ]:
1441
+ # passing n = 0 is invalid for these offset classes
1442
+ return
1443
+
1444
+ vec = DatetimeIndex(
1445
+ [
1446
+ Timestamp("2000-01-05 00:15:00"),
1447
+ Timestamp("2000-01-31 00:23:00"),
1448
+ Timestamp("2000-01-01"),
1449
+ Timestamp("2000-03-31"),
1450
+ Timestamp("2000-02-29"),
1451
+ Timestamp("2000-12-31"),
1452
+ Timestamp("2000-05-15"),
1453
+ Timestamp("2001-06-15"),
1454
+ ]
1455
+ )
1456
+ vec = tm.box_expected(vec, box_with_array)
1457
+ vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec
1458
+
1459
+ offset_cls = getattr(pd.offsets, cls_name)
1460
+
1461
+ with warnings.catch_warnings(record=True):
1462
+ # pandas.errors.PerformanceWarning: Non-vectorized DateOffset being
1463
+ # applied to Series or DatetimeIndex
1464
+ # we aren't testing that here, so ignore.
1465
+ warnings.simplefilter("ignore", PerformanceWarning)
1466
+
1467
+ offset = offset_cls(n, normalize=normalize, **kwargs)
1468
+
1469
+ expected = DatetimeIndex([x + offset for x in vec_items])
1470
+ expected = tm.box_expected(expected, box_with_array)
1471
+ tm.assert_equal(expected, vec + offset)
1472
+
1473
+ expected = DatetimeIndex([x - offset for x in vec_items])
1474
+ expected = tm.box_expected(expected, box_with_array)
1475
+ tm.assert_equal(expected, vec - offset)
1476
+
1477
+ expected = DatetimeIndex([offset + x for x in vec_items])
1478
+ expected = tm.box_expected(expected, box_with_array)
1479
+ tm.assert_equal(expected, offset + vec)
1480
+ msg = "(bad|unsupported) operand type for unary"
1481
+ with pytest.raises(TypeError, match=msg):
1482
+ offset - vec
1483
+
1484
+ def test_dt64arr_add_sub_DateOffset(self, box_with_array):
1485
+ # GH#10699
1486
+ s = date_range("2000-01-01", "2000-01-31", name="a")
1487
+ s = tm.box_expected(s, box_with_array)
1488
+ result = s + DateOffset(years=1)
1489
+ result2 = DateOffset(years=1) + s
1490
+ exp = date_range("2001-01-01", "2001-01-31", name="a")._with_freq(None)
1491
+ exp = tm.box_expected(exp, box_with_array)
1492
+ tm.assert_equal(result, exp)
1493
+ tm.assert_equal(result2, exp)
1494
+
1495
+ result = s - DateOffset(years=1)
1496
+ exp = date_range("1999-01-01", "1999-01-31", name="a")._with_freq(None)
1497
+ exp = tm.box_expected(exp, box_with_array)
1498
+ tm.assert_equal(result, exp)
1499
+
1500
+ s = DatetimeIndex(
1501
+ [
1502
+ Timestamp("2000-01-15 00:15:00", tz="US/Central"),
1503
+ Timestamp("2000-02-15", tz="US/Central"),
1504
+ ],
1505
+ name="a",
1506
+ )
1507
+ s = tm.box_expected(s, box_with_array)
1508
+ result = s + pd.offsets.Day()
1509
+ result2 = pd.offsets.Day() + s
1510
+ exp = DatetimeIndex(
1511
+ [
1512
+ Timestamp("2000-01-16 00:15:00", tz="US/Central"),
1513
+ Timestamp("2000-02-16", tz="US/Central"),
1514
+ ],
1515
+ name="a",
1516
+ )
1517
+ exp = tm.box_expected(exp, box_with_array)
1518
+ tm.assert_equal(result, exp)
1519
+ tm.assert_equal(result2, exp)
1520
+
1521
+ s = DatetimeIndex(
1522
+ [
1523
+ Timestamp("2000-01-15 00:15:00", tz="US/Central"),
1524
+ Timestamp("2000-02-15", tz="US/Central"),
1525
+ ],
1526
+ name="a",
1527
+ )
1528
+ s = tm.box_expected(s, box_with_array)
1529
+ result = s + pd.offsets.MonthEnd()
1530
+ result2 = pd.offsets.MonthEnd() + s
1531
+ exp = DatetimeIndex(
1532
+ [
1533
+ Timestamp("2000-01-31 00:15:00", tz="US/Central"),
1534
+ Timestamp("2000-02-29", tz="US/Central"),
1535
+ ],
1536
+ name="a",
1537
+ )
1538
+ exp = tm.box_expected(exp, box_with_array)
1539
+ tm.assert_equal(result, exp)
1540
+ tm.assert_equal(result2, exp)
1541
+
1542
+ @pytest.mark.parametrize(
1543
+ "other",
1544
+ [
1545
+ np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)]),
1546
+ np.array([pd.offsets.DateOffset(years=1), pd.offsets.MonthEnd()]),
1547
+ np.array( # matching offsets
1548
+ [pd.offsets.DateOffset(years=1), pd.offsets.DateOffset(years=1)]
1549
+ ),
1550
+ ],
1551
+ )
1552
+ @pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
1553
+ @pytest.mark.parametrize("box_other", [True, False])
1554
+ def test_dt64arr_add_sub_offset_array(
1555
+ self, tz_naive_fixture, box_with_array, box_other, op, other
1556
+ ):
1557
+ # GH#18849
1558
+ # GH#10699 array of offsets
1559
+
1560
+ tz = tz_naive_fixture
1561
+ dti = date_range("2017-01-01", periods=2, tz=tz)
1562
+ dtarr = tm.box_expected(dti, box_with_array)
1563
+
1564
+ other = np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])
1565
+ expected = DatetimeIndex([op(dti[n], other[n]) for n in range(len(dti))])
1566
+ expected = tm.box_expected(expected, box_with_array).astype(object)
1567
+
1568
+ if box_other:
1569
+ other = tm.box_expected(other, box_with_array)
1570
+ if box_with_array is pd.array and op is roperator.radd:
1571
+ # We expect a PandasArray, not ndarray[object] here
1572
+ expected = pd.array(expected, dtype=object)
1573
+
1574
+ with tm.assert_produces_warning(PerformanceWarning):
1575
+ res = op(dtarr, other)
1576
+
1577
+ tm.assert_equal(res, expected)
1578
+
1579
+ @pytest.mark.parametrize(
1580
+ "op, offset, exp, exp_freq",
1581
+ [
1582
+ (
1583
+ "__add__",
1584
+ DateOffset(months=3, days=10),
1585
+ [
1586
+ Timestamp("2014-04-11"),
1587
+ Timestamp("2015-04-11"),
1588
+ Timestamp("2016-04-11"),
1589
+ Timestamp("2017-04-11"),
1590
+ ],
1591
+ None,
1592
+ ),
1593
+ (
1594
+ "__add__",
1595
+ DateOffset(months=3),
1596
+ [
1597
+ Timestamp("2014-04-01"),
1598
+ Timestamp("2015-04-01"),
1599
+ Timestamp("2016-04-01"),
1600
+ Timestamp("2017-04-01"),
1601
+ ],
1602
+ "AS-APR",
1603
+ ),
1604
+ (
1605
+ "__sub__",
1606
+ DateOffset(months=3, days=10),
1607
+ [
1608
+ Timestamp("2013-09-21"),
1609
+ Timestamp("2014-09-21"),
1610
+ Timestamp("2015-09-21"),
1611
+ Timestamp("2016-09-21"),
1612
+ ],
1613
+ None,
1614
+ ),
1615
+ (
1616
+ "__sub__",
1617
+ DateOffset(months=3),
1618
+ [
1619
+ Timestamp("2013-10-01"),
1620
+ Timestamp("2014-10-01"),
1621
+ Timestamp("2015-10-01"),
1622
+ Timestamp("2016-10-01"),
1623
+ ],
1624
+ "AS-OCT",
1625
+ ),
1626
+ ],
1627
+ )
1628
+ def test_dti_add_sub_nonzero_mth_offset(
1629
+ self, op, offset, exp, exp_freq, tz_aware_fixture, box_with_array
1630
+ ):
1631
+ # GH 26258
1632
+ tz = tz_aware_fixture
1633
+ date = date_range(start="01 Jan 2014", end="01 Jan 2017", freq="AS", tz=tz)
1634
+ date = tm.box_expected(date, box_with_array, False)
1635
+ mth = getattr(date, op)
1636
+ result = mth(offset)
1637
+
1638
+ expected = DatetimeIndex(exp, tz=tz)
1639
+ expected = tm.box_expected(expected, box_with_array, False)
1640
+ tm.assert_equal(result, expected)
1641
+
1642
+
1643
+ class TestDatetime64OverflowHandling:
1644
+ # TODO: box + de-duplicate
1645
+
1646
+ def test_dt64_overflow_masking(self, box_with_array):
1647
+ # GH#25317
1648
+ left = Series([Timestamp("1969-12-31")])
1649
+ right = Series([NaT])
1650
+
1651
+ left = tm.box_expected(left, box_with_array)
1652
+ right = tm.box_expected(right, box_with_array)
1653
+
1654
+ expected = TimedeltaIndex([NaT])
1655
+ expected = tm.box_expected(expected, box_with_array)
1656
+
1657
+ result = left - right
1658
+ tm.assert_equal(result, expected)
1659
+
1660
+ def test_dt64_series_arith_overflow(self):
1661
+ # GH#12534, fixed by GH#19024
1662
+ dt = Timestamp("1700-01-31")
1663
+ td = Timedelta("20000 Days")
1664
+ dti = date_range("1949-09-30", freq="100Y", periods=4)
1665
+ ser = Series(dti)
1666
+ msg = "Overflow in int64 addition"
1667
+ with pytest.raises(OverflowError, match=msg):
1668
+ ser - dt
1669
+ with pytest.raises(OverflowError, match=msg):
1670
+ dt - ser
1671
+ with pytest.raises(OverflowError, match=msg):
1672
+ ser + td
1673
+ with pytest.raises(OverflowError, match=msg):
1674
+ td + ser
1675
+
1676
+ ser.iloc[-1] = NaT
1677
+ expected = Series(
1678
+ ["2004-10-03", "2104-10-04", "2204-10-04", "NaT"], dtype="datetime64[ns]"
1679
+ )
1680
+ res = ser + td
1681
+ tm.assert_series_equal(res, expected)
1682
+ res = td + ser
1683
+ tm.assert_series_equal(res, expected)
1684
+
1685
+ ser.iloc[1:] = NaT
1686
+ expected = Series(["91279 Days", "NaT", "NaT", "NaT"], dtype="timedelta64[ns]")
1687
+ res = ser - dt
1688
+ tm.assert_series_equal(res, expected)
1689
+ res = dt - ser
1690
+ tm.assert_series_equal(res, -expected)
1691
+
1692
+ def test_datetimeindex_sub_timestamp_overflow(self):
1693
+ dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max])
1694
+ dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min])
1695
+
1696
+ tsneg = Timestamp("1950-01-01").as_unit("ns")
1697
+ ts_neg_variants = [
1698
+ tsneg,
1699
+ tsneg.to_pydatetime(),
1700
+ tsneg.to_datetime64().astype("datetime64[ns]"),
1701
+ tsneg.to_datetime64().astype("datetime64[D]"),
1702
+ ]
1703
+
1704
+ tspos = Timestamp("1980-01-01").as_unit("ns")
1705
+ ts_pos_variants = [
1706
+ tspos,
1707
+ tspos.to_pydatetime(),
1708
+ tspos.to_datetime64().astype("datetime64[ns]"),
1709
+ tspos.to_datetime64().astype("datetime64[D]"),
1710
+ ]
1711
+ msg = "Overflow in int64 addition"
1712
+ for variant in ts_neg_variants:
1713
+ with pytest.raises(OverflowError, match=msg):
1714
+ dtimax - variant
1715
+
1716
+ expected = Timestamp.max._value - tspos._value
1717
+ for variant in ts_pos_variants:
1718
+ res = dtimax - variant
1719
+ assert res[1]._value == expected
1720
+
1721
+ expected = Timestamp.min._value - tsneg._value
1722
+ for variant in ts_neg_variants:
1723
+ res = dtimin - variant
1724
+ assert res[1]._value == expected
1725
+
1726
+ for variant in ts_pos_variants:
1727
+ with pytest.raises(OverflowError, match=msg):
1728
+ dtimin - variant
1729
+
1730
+ def test_datetimeindex_sub_datetimeindex_overflow(self):
1731
+ # GH#22492, GH#22508
1732
+ dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max])
1733
+ dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min])
1734
+
1735
+ ts_neg = pd.to_datetime(["1950-01-01", "1950-01-01"])
1736
+ ts_pos = pd.to_datetime(["1980-01-01", "1980-01-01"])
1737
+
1738
+ # General tests
1739
+ expected = Timestamp.max._value - ts_pos[1]._value
1740
+ result = dtimax - ts_pos
1741
+ assert result[1]._value == expected
1742
+
1743
+ expected = Timestamp.min._value - ts_neg[1]._value
1744
+ result = dtimin - ts_neg
1745
+ assert result[1]._value == expected
1746
+ msg = "Overflow in int64 addition"
1747
+ with pytest.raises(OverflowError, match=msg):
1748
+ dtimax - ts_neg
1749
+
1750
+ with pytest.raises(OverflowError, match=msg):
1751
+ dtimin - ts_pos
1752
+
1753
+ # Edge cases
1754
+ tmin = pd.to_datetime([Timestamp.min])
1755
+ t1 = tmin + Timedelta.max + Timedelta("1us")
1756
+ with pytest.raises(OverflowError, match=msg):
1757
+ t1 - tmin
1758
+
1759
+ tmax = pd.to_datetime([Timestamp.max])
1760
+ t2 = tmax + Timedelta.min - Timedelta("1us")
1761
+ with pytest.raises(OverflowError, match=msg):
1762
+ tmax - t2
1763
+
1764
+
1765
+ class TestTimestampSeriesArithmetic:
1766
+ def test_empty_series_add_sub(self, box_with_array):
1767
+ # GH#13844
1768
+ a = Series(dtype="M8[ns]")
1769
+ b = Series(dtype="m8[ns]")
1770
+ a = box_with_array(a)
1771
+ b = box_with_array(b)
1772
+ tm.assert_equal(a, a + b)
1773
+ tm.assert_equal(a, a - b)
1774
+ tm.assert_equal(a, b + a)
1775
+ msg = "cannot subtract"
1776
+ with pytest.raises(TypeError, match=msg):
1777
+ b - a
1778
+
1779
+ def test_operators_datetimelike(self):
1780
+ # ## timedelta64 ###
1781
+ td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
1782
+ td1.iloc[2] = np.nan
1783
+
1784
+ # ## datetime64 ###
1785
+ dt1 = Series(
1786
+ [
1787
+ Timestamp("20111230"),
1788
+ Timestamp("20120101"),
1789
+ Timestamp("20120103"),
1790
+ ]
1791
+ )
1792
+ dt1.iloc[2] = np.nan
1793
+ dt2 = Series(
1794
+ [
1795
+ Timestamp("20111231"),
1796
+ Timestamp("20120102"),
1797
+ Timestamp("20120104"),
1798
+ ]
1799
+ )
1800
+ dt1 - dt2
1801
+ dt2 - dt1
1802
+
1803
+ # datetime64 with timetimedelta
1804
+ dt1 + td1
1805
+ td1 + dt1
1806
+ dt1 - td1
1807
+
1808
+ # timetimedelta with datetime64
1809
+ td1 + dt1
1810
+ dt1 + td1
1811
+
1812
+ def test_dt64ser_sub_datetime_dtype(self):
1813
+ ts = Timestamp(datetime(1993, 1, 7, 13, 30, 00))
1814
+ dt = datetime(1993, 6, 22, 13, 30)
1815
+ ser = Series([ts])
1816
+ result = pd.to_timedelta(np.abs(ser - dt))
1817
+ assert result.dtype == "timedelta64[ns]"
1818
+
1819
+ # -------------------------------------------------------------
1820
+ # TODO: This next block of tests came from tests.series.test_operators,
1821
+ # needs to be de-duplicated and parametrized over `box` classes
1822
+
1823
+ @pytest.mark.parametrize(
1824
+ "left, right, op_fail",
1825
+ [
1826
+ [
1827
+ [Timestamp("20111230"), Timestamp("20120101"), NaT],
1828
+ [Timestamp("20111231"), Timestamp("20120102"), Timestamp("20120104")],
1829
+ ["__sub__", "__rsub__"],
1830
+ ],
1831
+ [
1832
+ [Timestamp("20111230"), Timestamp("20120101"), NaT],
1833
+ [timedelta(minutes=5, seconds=3), timedelta(minutes=5, seconds=3), NaT],
1834
+ ["__add__", "__radd__", "__sub__"],
1835
+ ],
1836
+ [
1837
+ [
1838
+ Timestamp("20111230", tz="US/Eastern"),
1839
+ Timestamp("20111230", tz="US/Eastern"),
1840
+ NaT,
1841
+ ],
1842
+ [timedelta(minutes=5, seconds=3), NaT, timedelta(minutes=5, seconds=3)],
1843
+ ["__add__", "__radd__", "__sub__"],
1844
+ ],
1845
+ ],
1846
+ )
1847
+ def test_operators_datetimelike_invalid(
1848
+ self, left, right, op_fail, all_arithmetic_operators
1849
+ ):
1850
+ # these are all TypeError ops
1851
+ op_str = all_arithmetic_operators
1852
+ arg1 = Series(left)
1853
+ arg2 = Series(right)
1854
+ # check that we are getting a TypeError
1855
+ # with 'operate' (from core/ops.py) for the ops that are not
1856
+ # defined
1857
+ op = getattr(arg1, op_str, None)
1858
+ # Previously, _validate_for_numeric_binop in core/indexes/base.py
1859
+ # did this for us.
1860
+ if op_str not in op_fail:
1861
+ with pytest.raises(
1862
+ TypeError, match="operate|[cC]annot|unsupported operand"
1863
+ ):
1864
+ op(arg2)
1865
+ else:
1866
+ # Smoke test
1867
+ op(arg2)
1868
+
1869
+ def test_sub_single_tz(self):
1870
+ # GH#12290
1871
+ s1 = Series([Timestamp("2016-02-10", tz="America/Sao_Paulo")])
1872
+ s2 = Series([Timestamp("2016-02-08", tz="America/Sao_Paulo")])
1873
+ result = s1 - s2
1874
+ expected = Series([Timedelta("2days")])
1875
+ tm.assert_series_equal(result, expected)
1876
+ result = s2 - s1
1877
+ expected = Series([Timedelta("-2days")])
1878
+ tm.assert_series_equal(result, expected)
1879
+
1880
+ def test_dt64tz_series_sub_dtitz(self):
1881
+ # GH#19071 subtracting tzaware DatetimeIndex from tzaware Series
1882
+ # (with same tz) raises, fixed by #19024
1883
+ dti = date_range("1999-09-30", periods=10, tz="US/Pacific")
1884
+ ser = Series(dti)
1885
+ expected = Series(TimedeltaIndex(["0days"] * 10))
1886
+
1887
+ res = dti - ser
1888
+ tm.assert_series_equal(res, expected)
1889
+ res = ser - dti
1890
+ tm.assert_series_equal(res, expected)
1891
+
1892
+ def test_sub_datetime_compat(self):
1893
+ # see GH#14088
1894
+ s = Series([datetime(2016, 8, 23, 12, tzinfo=pytz.utc), NaT])
1895
+ dt = datetime(2016, 8, 22, 12, tzinfo=pytz.utc)
1896
+ exp = Series([Timedelta("1 days"), NaT])
1897
+ tm.assert_series_equal(s - dt, exp)
1898
+ tm.assert_series_equal(s - Timestamp(dt), exp)
1899
+
1900
+ def test_dt64_series_add_mixed_tick_DateOffset(self):
1901
+ # GH#4532
1902
+ # operate with pd.offsets
1903
+ s = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
1904
+
1905
+ result = s + pd.offsets.Milli(5)
1906
+ result2 = pd.offsets.Milli(5) + s
1907
+ expected = Series(
1908
+ [Timestamp("20130101 9:01:00.005"), Timestamp("20130101 9:02:00.005")]
1909
+ )
1910
+ tm.assert_series_equal(result, expected)
1911
+ tm.assert_series_equal(result2, expected)
1912
+
1913
+ result = s + pd.offsets.Minute(5) + pd.offsets.Milli(5)
1914
+ expected = Series(
1915
+ [Timestamp("20130101 9:06:00.005"), Timestamp("20130101 9:07:00.005")]
1916
+ )
1917
+ tm.assert_series_equal(result, expected)
1918
+
1919
+ def test_datetime64_ops_nat(self):
1920
+ # GH#11349
1921
+ datetime_series = Series([NaT, Timestamp("19900315")])
1922
+ nat_series_dtype_timestamp = Series([NaT, NaT], dtype="datetime64[ns]")
1923
+ single_nat_dtype_datetime = Series([NaT], dtype="datetime64[ns]")
1924
+
1925
+ # subtraction
1926
+ tm.assert_series_equal(-NaT + datetime_series, nat_series_dtype_timestamp)
1927
+ msg = "bad operand type for unary -: 'DatetimeArray'"
1928
+ with pytest.raises(TypeError, match=msg):
1929
+ -single_nat_dtype_datetime + datetime_series
1930
+
1931
+ tm.assert_series_equal(
1932
+ -NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
1933
+ )
1934
+ with pytest.raises(TypeError, match=msg):
1935
+ -single_nat_dtype_datetime + nat_series_dtype_timestamp
1936
+
1937
+ # addition
1938
+ tm.assert_series_equal(
1939
+ nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
1940
+ )
1941
+ tm.assert_series_equal(
1942
+ NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
1943
+ )
1944
+
1945
+ tm.assert_series_equal(
1946
+ nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
1947
+ )
1948
+ tm.assert_series_equal(
1949
+ NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
1950
+ )
1951
+
1952
+ # -------------------------------------------------------------
1953
+ # Timezone-Centric Tests
1954
+
1955
+ def test_operators_datetimelike_with_timezones(self):
1956
+ tz = "US/Eastern"
1957
+ dt1 = Series(date_range("2000-01-01 09:00:00", periods=5, tz=tz), name="foo")
1958
+ dt2 = dt1.copy()
1959
+ dt2.iloc[2] = np.nan
1960
+
1961
+ td1 = Series(pd.timedelta_range("1 days 1 min", periods=5, freq="H"))
1962
+ td2 = td1.copy()
1963
+ td2.iloc[1] = np.nan
1964
+ assert td2._values.freq is None
1965
+
1966
+ result = dt1 + td1[0]
1967
+ exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
1968
+ tm.assert_series_equal(result, exp)
1969
+
1970
+ result = dt2 + td2[0]
1971
+ exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
1972
+ tm.assert_series_equal(result, exp)
1973
+
1974
+ # odd numpy behavior with scalar timedeltas
1975
+ result = td1[0] + dt1
1976
+ exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
1977
+ tm.assert_series_equal(result, exp)
1978
+
1979
+ result = td2[0] + dt2
1980
+ exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
1981
+ tm.assert_series_equal(result, exp)
1982
+
1983
+ result = dt1 - td1[0]
1984
+ exp = (dt1.dt.tz_localize(None) - td1[0]).dt.tz_localize(tz)
1985
+ tm.assert_series_equal(result, exp)
1986
+ msg = "(bad|unsupported) operand type for unary"
1987
+ with pytest.raises(TypeError, match=msg):
1988
+ td1[0] - dt1
1989
+
1990
+ result = dt2 - td2[0]
1991
+ exp = (dt2.dt.tz_localize(None) - td2[0]).dt.tz_localize(tz)
1992
+ tm.assert_series_equal(result, exp)
1993
+ with pytest.raises(TypeError, match=msg):
1994
+ td2[0] - dt2
1995
+
1996
+ result = dt1 + td1
1997
+ exp = (dt1.dt.tz_localize(None) + td1).dt.tz_localize(tz)
1998
+ tm.assert_series_equal(result, exp)
1999
+
2000
+ result = dt2 + td2
2001
+ exp = (dt2.dt.tz_localize(None) + td2).dt.tz_localize(tz)
2002
+ tm.assert_series_equal(result, exp)
2003
+
2004
+ result = dt1 - td1
2005
+ exp = (dt1.dt.tz_localize(None) - td1).dt.tz_localize(tz)
2006
+ tm.assert_series_equal(result, exp)
2007
+
2008
+ result = dt2 - td2
2009
+ exp = (dt2.dt.tz_localize(None) - td2).dt.tz_localize(tz)
2010
+ tm.assert_series_equal(result, exp)
2011
+ msg = "cannot (add|subtract)"
2012
+ with pytest.raises(TypeError, match=msg):
2013
+ td1 - dt1
2014
+ with pytest.raises(TypeError, match=msg):
2015
+ td2 - dt2
2016
+
2017
+
2018
+ class TestDatetimeIndexArithmetic:
2019
+ # -------------------------------------------------------------
2020
+ # Binary operations DatetimeIndex and TimedeltaIndex/array
2021
+
2022
+ def test_dti_add_tdi(self, tz_naive_fixture):
2023
+ # GH#17558
2024
+ tz = tz_naive_fixture
2025
+ dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2026
+ tdi = pd.timedelta_range("0 days", periods=10)
2027
+ expected = date_range("2017-01-01", periods=10, tz=tz)
2028
+ expected = expected._with_freq(None)
2029
+
2030
+ # add with TimedeltaIndex
2031
+ result = dti + tdi
2032
+ tm.assert_index_equal(result, expected)
2033
+
2034
+ result = tdi + dti
2035
+ tm.assert_index_equal(result, expected)
2036
+
2037
+ # add with timedelta64 array
2038
+ result = dti + tdi.values
2039
+ tm.assert_index_equal(result, expected)
2040
+
2041
+ result = tdi.values + dti
2042
+ tm.assert_index_equal(result, expected)
2043
+
2044
+ def test_dti_iadd_tdi(self, tz_naive_fixture):
2045
+ # GH#17558
2046
+ tz = tz_naive_fixture
2047
+ dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2048
+ tdi = pd.timedelta_range("0 days", periods=10)
2049
+ expected = date_range("2017-01-01", periods=10, tz=tz)
2050
+ expected = expected._with_freq(None)
2051
+
2052
+ # iadd with TimedeltaIndex
2053
+ result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2054
+ result += tdi
2055
+ tm.assert_index_equal(result, expected)
2056
+
2057
+ result = pd.timedelta_range("0 days", periods=10)
2058
+ result += dti
2059
+ tm.assert_index_equal(result, expected)
2060
+
2061
+ # iadd with timedelta64 array
2062
+ result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2063
+ result += tdi.values
2064
+ tm.assert_index_equal(result, expected)
2065
+
2066
+ result = pd.timedelta_range("0 days", periods=10)
2067
+ result += dti
2068
+ tm.assert_index_equal(result, expected)
2069
+
2070
+ def test_dti_sub_tdi(self, tz_naive_fixture):
2071
+ # GH#17558
2072
+ tz = tz_naive_fixture
2073
+ dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2074
+ tdi = pd.timedelta_range("0 days", periods=10)
2075
+ expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D")
2076
+ expected = expected._with_freq(None)
2077
+
2078
+ # sub with TimedeltaIndex
2079
+ result = dti - tdi
2080
+ tm.assert_index_equal(result, expected)
2081
+
2082
+ msg = "cannot subtract .*TimedeltaArray"
2083
+ with pytest.raises(TypeError, match=msg):
2084
+ tdi - dti
2085
+
2086
+ # sub with timedelta64 array
2087
+ result = dti - tdi.values
2088
+ tm.assert_index_equal(result, expected)
2089
+
2090
+ msg = "cannot subtract a datelike from a TimedeltaArray"
2091
+ with pytest.raises(TypeError, match=msg):
2092
+ tdi.values - dti
2093
+
2094
+ def test_dti_isub_tdi(self, tz_naive_fixture):
2095
+ # GH#17558
2096
+ tz = tz_naive_fixture
2097
+ dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2098
+ tdi = pd.timedelta_range("0 days", periods=10)
2099
+ expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D")
2100
+ expected = expected._with_freq(None)
2101
+
2102
+ # isub with TimedeltaIndex
2103
+ result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2104
+ result -= tdi
2105
+ tm.assert_index_equal(result, expected)
2106
+
2107
+ # DTA.__isub__ GH#43904
2108
+ dta = dti._data.copy()
2109
+ dta -= tdi
2110
+ tm.assert_datetime_array_equal(dta, expected._data)
2111
+
2112
+ out = dti._data.copy()
2113
+ np.subtract(out, tdi, out=out)
2114
+ tm.assert_datetime_array_equal(out, expected._data)
2115
+
2116
+ msg = "cannot subtract a datelike from a TimedeltaArray"
2117
+ with pytest.raises(TypeError, match=msg):
2118
+ tdi -= dti
2119
+
2120
+ # isub with timedelta64 array
2121
+ result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2122
+ result -= tdi.values
2123
+ tm.assert_index_equal(result, expected)
2124
+
2125
+ with pytest.raises(TypeError, match=msg):
2126
+ tdi.values -= dti
2127
+
2128
+ with pytest.raises(TypeError, match=msg):
2129
+ tdi._values -= dti
2130
+
2131
+ # -------------------------------------------------------------
2132
+ # Binary Operations DatetimeIndex and datetime-like
2133
+ # TODO: A couple other tests belong in this section. Move them in
2134
+ # A PR where there isn't already a giant diff.
2135
+
2136
+ # -------------------------------------------------------------
2137
+
2138
+ def test_dta_add_sub_index(self, tz_naive_fixture):
2139
+ # Check that DatetimeArray defers to Index classes
2140
+ dti = date_range("20130101", periods=3, tz=tz_naive_fixture)
2141
+ dta = dti.array
2142
+ result = dta - dti
2143
+ expected = dti - dti
2144
+ tm.assert_index_equal(result, expected)
2145
+
2146
+ tdi = result
2147
+ result = dta + tdi
2148
+ expected = dti + tdi
2149
+ tm.assert_index_equal(result, expected)
2150
+
2151
+ result = dta - tdi
2152
+ expected = dti - tdi
2153
+ tm.assert_index_equal(result, expected)
2154
+
2155
+ def test_sub_dti_dti(self):
2156
+ # previously performed setop (deprecated in 0.16.0), now changed to
2157
+ # return subtraction -> TimeDeltaIndex (GH ...)
2158
+
2159
+ dti = date_range("20130101", periods=3)
2160
+ dti_tz = date_range("20130101", periods=3).tz_localize("US/Eastern")
2161
+ expected = TimedeltaIndex([0, 0, 0])
2162
+
2163
+ result = dti - dti
2164
+ tm.assert_index_equal(result, expected)
2165
+
2166
+ result = dti_tz - dti_tz
2167
+ tm.assert_index_equal(result, expected)
2168
+ msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
2169
+ with pytest.raises(TypeError, match=msg):
2170
+ dti_tz - dti
2171
+
2172
+ with pytest.raises(TypeError, match=msg):
2173
+ dti - dti_tz
2174
+
2175
+ # isub
2176
+ dti -= dti
2177
+ tm.assert_index_equal(dti, expected)
2178
+
2179
+ # different length raises ValueError
2180
+ dti1 = date_range("20130101", periods=3)
2181
+ dti2 = date_range("20130101", periods=4)
2182
+ msg = "cannot add indices of unequal length"
2183
+ with pytest.raises(ValueError, match=msg):
2184
+ dti1 - dti2
2185
+
2186
+ # NaN propagation
2187
+ dti1 = DatetimeIndex(["2012-01-01", np.nan, "2012-01-03"])
2188
+ dti2 = DatetimeIndex(["2012-01-02", "2012-01-03", np.nan])
2189
+ expected = TimedeltaIndex(["1 days", np.nan, np.nan])
2190
+ result = dti2 - dti1
2191
+ tm.assert_index_equal(result, expected)
2192
+
2193
+ # -------------------------------------------------------------------
2194
+ # TODO: Most of this block is moved from series or frame tests, needs
2195
+ # cleanup, box-parametrization, and de-duplication
2196
+
2197
+ @pytest.mark.parametrize("op", [operator.add, operator.sub])
2198
+ def test_timedelta64_equal_timedelta_supported_ops(self, op, box_with_array):
2199
+ ser = Series(
2200
+ [
2201
+ Timestamp("20130301"),
2202
+ Timestamp("20130228 23:00:00"),
2203
+ Timestamp("20130228 22:00:00"),
2204
+ Timestamp("20130228 21:00:00"),
2205
+ ]
2206
+ )
2207
+ obj = box_with_array(ser)
2208
+
2209
+ intervals = ["D", "h", "m", "s", "us"]
2210
+
2211
+ def timedelta64(*args):
2212
+ # see casting notes in NumPy gh-12927
2213
+ return np.sum(list(starmap(np.timedelta64, zip(args, intervals))))
2214
+
2215
+ for d, h, m, s, us in product(*([range(2)] * 5)):
2216
+ nptd = timedelta64(d, h, m, s, us)
2217
+ pytd = timedelta(days=d, hours=h, minutes=m, seconds=s, microseconds=us)
2218
+ lhs = op(obj, nptd)
2219
+ rhs = op(obj, pytd)
2220
+
2221
+ tm.assert_equal(lhs, rhs)
2222
+
2223
+ def test_ops_nat_mixed_datetime64_timedelta64(self):
2224
+ # GH#11349
2225
+ timedelta_series = Series([NaT, Timedelta("1s")])
2226
+ datetime_series = Series([NaT, Timestamp("19900315")])
2227
+ nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[ns]")
2228
+ nat_series_dtype_timestamp = Series([NaT, NaT], dtype="datetime64[ns]")
2229
+ single_nat_dtype_datetime = Series([NaT], dtype="datetime64[ns]")
2230
+ single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[ns]")
2231
+
2232
+ # subtraction
2233
+ tm.assert_series_equal(
2234
+ datetime_series - single_nat_dtype_datetime, nat_series_dtype_timedelta
2235
+ )
2236
+
2237
+ tm.assert_series_equal(
2238
+ datetime_series - single_nat_dtype_timedelta, nat_series_dtype_timestamp
2239
+ )
2240
+ tm.assert_series_equal(
2241
+ -single_nat_dtype_timedelta + datetime_series, nat_series_dtype_timestamp
2242
+ )
2243
+
2244
+ # without a Series wrapping the NaT, it is ambiguous
2245
+ # whether it is a datetime64 or timedelta64
2246
+ # defaults to interpreting it as timedelta64
2247
+ tm.assert_series_equal(
2248
+ nat_series_dtype_timestamp - single_nat_dtype_datetime,
2249
+ nat_series_dtype_timedelta,
2250
+ )
2251
+
2252
+ tm.assert_series_equal(
2253
+ nat_series_dtype_timestamp - single_nat_dtype_timedelta,
2254
+ nat_series_dtype_timestamp,
2255
+ )
2256
+ tm.assert_series_equal(
2257
+ -single_nat_dtype_timedelta + nat_series_dtype_timestamp,
2258
+ nat_series_dtype_timestamp,
2259
+ )
2260
+ msg = "cannot subtract a datelike"
2261
+ with pytest.raises(TypeError, match=msg):
2262
+ timedelta_series - single_nat_dtype_datetime
2263
+
2264
+ # addition
2265
+ tm.assert_series_equal(
2266
+ nat_series_dtype_timestamp + single_nat_dtype_timedelta,
2267
+ nat_series_dtype_timestamp,
2268
+ )
2269
+ tm.assert_series_equal(
2270
+ single_nat_dtype_timedelta + nat_series_dtype_timestamp,
2271
+ nat_series_dtype_timestamp,
2272
+ )
2273
+
2274
+ tm.assert_series_equal(
2275
+ nat_series_dtype_timestamp + single_nat_dtype_timedelta,
2276
+ nat_series_dtype_timestamp,
2277
+ )
2278
+ tm.assert_series_equal(
2279
+ single_nat_dtype_timedelta + nat_series_dtype_timestamp,
2280
+ nat_series_dtype_timestamp,
2281
+ )
2282
+
2283
+ tm.assert_series_equal(
2284
+ nat_series_dtype_timedelta + single_nat_dtype_datetime,
2285
+ nat_series_dtype_timestamp,
2286
+ )
2287
+ tm.assert_series_equal(
2288
+ single_nat_dtype_datetime + nat_series_dtype_timedelta,
2289
+ nat_series_dtype_timestamp,
2290
+ )
2291
+
2292
+ def test_ufunc_coercions(self):
2293
+ idx = date_range("2011-01-01", periods=3, freq="2D", name="x")
2294
+
2295
+ delta = np.timedelta64(1, "D")
2296
+ exp = date_range("2011-01-02", periods=3, freq="2D", name="x")
2297
+ for result in [idx + delta, np.add(idx, delta)]:
2298
+ assert isinstance(result, DatetimeIndex)
2299
+ tm.assert_index_equal(result, exp)
2300
+ assert result.freq == "2D"
2301
+
2302
+ exp = date_range("2010-12-31", periods=3, freq="2D", name="x")
2303
+
2304
+ for result in [idx - delta, np.subtract(idx, delta)]:
2305
+ assert isinstance(result, DatetimeIndex)
2306
+ tm.assert_index_equal(result, exp)
2307
+ assert result.freq == "2D"
2308
+
2309
+ # When adding/subtracting an ndarray (which has no .freq), the result
2310
+ # does not infer freq
2311
+ idx = idx._with_freq(None)
2312
+ delta = np.array(
2313
+ [np.timedelta64(1, "D"), np.timedelta64(2, "D"), np.timedelta64(3, "D")]
2314
+ )
2315
+ exp = DatetimeIndex(["2011-01-02", "2011-01-05", "2011-01-08"], name="x")
2316
+
2317
+ for result in [idx + delta, np.add(idx, delta)]:
2318
+ tm.assert_index_equal(result, exp)
2319
+ assert result.freq == exp.freq
2320
+
2321
+ exp = DatetimeIndex(["2010-12-31", "2011-01-01", "2011-01-02"], name="x")
2322
+ for result in [idx - delta, np.subtract(idx, delta)]:
2323
+ assert isinstance(result, DatetimeIndex)
2324
+ tm.assert_index_equal(result, exp)
2325
+ assert result.freq == exp.freq
2326
+
2327
+ def test_dti_add_series(self, tz_naive_fixture, names):
2328
+ # GH#13905
2329
+ tz = tz_naive_fixture
2330
+ index = DatetimeIndex(
2331
+ ["2016-06-28 05:30", "2016-06-28 05:31"], tz=tz, name=names[0]
2332
+ )
2333
+ ser = Series([Timedelta(seconds=5)] * 2, index=index, name=names[1])
2334
+ expected = Series(index + Timedelta(seconds=5), index=index, name=names[2])
2335
+
2336
+ # passing name arg isn't enough when names[2] is None
2337
+ expected.name = names[2]
2338
+ assert expected.dtype == index.dtype
2339
+ result = ser + index
2340
+ tm.assert_series_equal(result, expected)
2341
+ result2 = index + ser
2342
+ tm.assert_series_equal(result2, expected)
2343
+
2344
+ expected = index + Timedelta(seconds=5)
2345
+ result3 = ser.values + index
2346
+ tm.assert_index_equal(result3, expected)
2347
+ result4 = index + ser.values
2348
+ tm.assert_index_equal(result4, expected)
2349
+
2350
+ @pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
2351
+ def test_dti_addsub_offset_arraylike(
2352
+ self, tz_naive_fixture, names, op, index_or_series
2353
+ ):
2354
+ # GH#18849, GH#19744
2355
+ other_box = index_or_series
2356
+
2357
+ tz = tz_naive_fixture
2358
+ dti = date_range("2017-01-01", periods=2, tz=tz, name=names[0])
2359
+ other = other_box([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])
2360
+
2361
+ xbox = get_upcast_box(dti, other)
2362
+
2363
+ with tm.assert_produces_warning(PerformanceWarning):
2364
+ res = op(dti, other)
2365
+
2366
+ expected = DatetimeIndex(
2367
+ [op(dti[n], other[n]) for n in range(len(dti))], name=names[2], freq="infer"
2368
+ )
2369
+ expected = tm.box_expected(expected, xbox).astype(object)
2370
+ tm.assert_equal(res, expected)
2371
+
2372
+ @pytest.mark.parametrize("other_box", [pd.Index, np.array])
2373
+ def test_dti_addsub_object_arraylike(
2374
+ self, tz_naive_fixture, box_with_array, other_box
2375
+ ):
2376
+ tz = tz_naive_fixture
2377
+
2378
+ dti = date_range("2017-01-01", periods=2, tz=tz)
2379
+ dtarr = tm.box_expected(dti, box_with_array)
2380
+ other = other_box([pd.offsets.MonthEnd(), Timedelta(days=4)])
2381
+ xbox = get_upcast_box(dtarr, other)
2382
+
2383
+ expected = DatetimeIndex(["2017-01-31", "2017-01-06"], tz=tz_naive_fixture)
2384
+ expected = tm.box_expected(expected, xbox).astype(object)
2385
+
2386
+ with tm.assert_produces_warning(PerformanceWarning):
2387
+ result = dtarr + other
2388
+ tm.assert_equal(result, expected)
2389
+
2390
+ expected = DatetimeIndex(["2016-12-31", "2016-12-29"], tz=tz_naive_fixture)
2391
+ expected = tm.box_expected(expected, xbox).astype(object)
2392
+
2393
+ with tm.assert_produces_warning(PerformanceWarning):
2394
+ result = dtarr - other
2395
+ tm.assert_equal(result, expected)
2396
+
2397
+
2398
+ @pytest.mark.parametrize("years", [-1, 0, 1])
2399
+ @pytest.mark.parametrize("months", [-2, 0, 2])
2400
+ def test_shift_months(years, months):
2401
+ dti = DatetimeIndex(
2402
+ [
2403
+ Timestamp("2000-01-05 00:15:00"),
2404
+ Timestamp("2000-01-31 00:23:00"),
2405
+ Timestamp("2000-01-01"),
2406
+ Timestamp("2000-02-29"),
2407
+ Timestamp("2000-12-31"),
2408
+ ]
2409
+ )
2410
+ actual = DatetimeIndex(shift_months(dti.asi8, years * 12 + months))
2411
+
2412
+ raw = [x + pd.offsets.DateOffset(years=years, months=months) for x in dti]
2413
+ expected = DatetimeIndex(raw)
2414
+ tm.assert_index_equal(actual, expected)
2415
+
2416
+
2417
+ def test_dt64arr_addsub_object_dtype_2d():
2418
+ # block-wise DataFrame operations will require operating on 2D
2419
+ # DatetimeArray/TimedeltaArray, so check that specifically.
2420
+ dti = date_range("1994-02-13", freq="2W", periods=4)
2421
+ dta = dti._data.reshape((4, 1))
2422
+
2423
+ other = np.array([[pd.offsets.Day(n)] for n in range(4)])
2424
+ assert other.shape == dta.shape
2425
+
2426
+ with tm.assert_produces_warning(PerformanceWarning):
2427
+ result = dta + other
2428
+ with tm.assert_produces_warning(PerformanceWarning):
2429
+ expected = (dta[:, 0] + other[:, 0]).reshape(-1, 1)
2430
+
2431
+ tm.assert_numpy_array_equal(result, expected)
2432
+
2433
+ with tm.assert_produces_warning(PerformanceWarning):
2434
+ # Case where we expect to get a TimedeltaArray back
2435
+ result2 = dta - dta.astype(object)
2436
+
2437
+ assert result2.shape == (4, 1)
2438
+ assert all(td._value == 0 for td in result2.ravel())
2439
+
2440
+
2441
+ def test_non_nano_dt64_addsub_np_nat_scalars():
2442
+ # GH 52295
2443
+ ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
2444
+ result = ser - np.datetime64("nat", "ms")
2445
+ expected = Series([NaT] * 3, dtype="timedelta64[ms]")
2446
+ tm.assert_series_equal(result, expected)
2447
+
2448
+ result = ser + np.timedelta64("nat", "ms")
2449
+ expected = Series([NaT] * 3, dtype="datetime64[ms]")
2450
+ tm.assert_series_equal(result, expected)
2451
+
2452
+
2453
+ def test_non_nano_dt64_addsub_np_nat_scalars_unitless():
2454
+ # GH 52295
2455
+ # TODO: Can we default to the ser unit?
2456
+ ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
2457
+ result = ser - np.datetime64("nat")
2458
+ expected = Series([NaT] * 3, dtype="timedelta64[ns]")
2459
+ tm.assert_series_equal(result, expected)
2460
+
2461
+ result = ser + np.timedelta64("nat")
2462
+ expected = Series([NaT] * 3, dtype="datetime64[ns]")
2463
+ tm.assert_series_equal(result, expected)
2464
+
2465
+
2466
+ def test_non_nano_dt64_addsub_np_nat_scalars_unsupported_unit():
2467
+ # GH 52295
2468
+ ser = Series([12332, 23243, 33243], dtype="datetime64[s]")
2469
+ result = ser - np.datetime64("nat", "D")
2470
+ expected = Series([NaT] * 3, dtype="timedelta64[s]")
2471
+ tm.assert_series_equal(result, expected)
2472
+
2473
+ result = ser + np.timedelta64("nat", "D")
2474
+ expected = Series([NaT] * 3, dtype="datetime64[s]")
2475
+ tm.assert_series_equal(result, expected)
videochat2/lib/python3.10/site-packages/pandas/tests/arithmetic/test_interval.py ADDED
@@ -0,0 +1,306 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import operator
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.core.dtypes.common import is_list_like
7
+
8
+ import pandas as pd
9
+ from pandas import (
10
+ Categorical,
11
+ Index,
12
+ Interval,
13
+ IntervalIndex,
14
+ Period,
15
+ Series,
16
+ Timedelta,
17
+ Timestamp,
18
+ date_range,
19
+ period_range,
20
+ timedelta_range,
21
+ )
22
+ import pandas._testing as tm
23
+ from pandas.core.arrays import (
24
+ BooleanArray,
25
+ IntervalArray,
26
+ )
27
+ from pandas.tests.arithmetic.common import get_upcast_box
28
+
29
+
30
+ @pytest.fixture(
31
+ params=[
32
+ (Index([0, 2, 4, 4]), Index([1, 3, 5, 8])),
33
+ (Index([0.0, 1.0, 2.0, np.nan]), Index([1.0, 2.0, 3.0, np.nan])),
34
+ (
35
+ timedelta_range("0 days", periods=3).insert(3, pd.NaT),
36
+ timedelta_range("1 day", periods=3).insert(3, pd.NaT),
37
+ ),
38
+ (
39
+ date_range("20170101", periods=3).insert(3, pd.NaT),
40
+ date_range("20170102", periods=3).insert(3, pd.NaT),
41
+ ),
42
+ (
43
+ date_range("20170101", periods=3, tz="US/Eastern").insert(3, pd.NaT),
44
+ date_range("20170102", periods=3, tz="US/Eastern").insert(3, pd.NaT),
45
+ ),
46
+ ],
47
+ ids=lambda x: str(x[0].dtype),
48
+ )
49
+ def left_right_dtypes(request):
50
+ """
51
+ Fixture for building an IntervalArray from various dtypes
52
+ """
53
+ return request.param
54
+
55
+
56
+ @pytest.fixture
57
+ def interval_array(left_right_dtypes):
58
+ """
59
+ Fixture to generate an IntervalArray of various dtypes containing NA if possible
60
+ """
61
+ left, right = left_right_dtypes
62
+ return IntervalArray.from_arrays(left, right)
63
+
64
+
65
+ def create_categorical_intervals(left, right, closed="right"):
66
+ return Categorical(IntervalIndex.from_arrays(left, right, closed))
67
+
68
+
69
+ def create_series_intervals(left, right, closed="right"):
70
+ return Series(IntervalArray.from_arrays(left, right, closed))
71
+
72
+
73
+ def create_series_categorical_intervals(left, right, closed="right"):
74
+ return Series(Categorical(IntervalIndex.from_arrays(left, right, closed)))
75
+
76
+
77
+ class TestComparison:
78
+ @pytest.fixture(params=[operator.eq, operator.ne])
79
+ def op(self, request):
80
+ return request.param
81
+
82
+ @pytest.fixture(
83
+ params=[
84
+ IntervalArray.from_arrays,
85
+ IntervalIndex.from_arrays,
86
+ create_categorical_intervals,
87
+ create_series_intervals,
88
+ create_series_categorical_intervals,
89
+ ],
90
+ ids=[
91
+ "IntervalArray",
92
+ "IntervalIndex",
93
+ "Categorical[Interval]",
94
+ "Series[Interval]",
95
+ "Series[Categorical[Interval]]",
96
+ ],
97
+ )
98
+ def interval_constructor(self, request):
99
+ """
100
+ Fixture for all pandas native interval constructors.
101
+ To be used as the LHS of IntervalArray comparisons.
102
+ """
103
+ return request.param
104
+
105
+ def elementwise_comparison(self, op, interval_array, other):
106
+ """
107
+ Helper that performs elementwise comparisons between `array` and `other`
108
+ """
109
+ other = other if is_list_like(other) else [other] * len(interval_array)
110
+ expected = np.array([op(x, y) for x, y in zip(interval_array, other)])
111
+ if isinstance(other, Series):
112
+ return Series(expected, index=other.index)
113
+ return expected
114
+
115
+ def test_compare_scalar_interval(self, op, interval_array):
116
+ # matches first interval
117
+ other = interval_array[0]
118
+ result = op(interval_array, other)
119
+ expected = self.elementwise_comparison(op, interval_array, other)
120
+ tm.assert_numpy_array_equal(result, expected)
121
+
122
+ # matches on a single endpoint but not both
123
+ other = Interval(interval_array.left[0], interval_array.right[1])
124
+ result = op(interval_array, other)
125
+ expected = self.elementwise_comparison(op, interval_array, other)
126
+ tm.assert_numpy_array_equal(result, expected)
127
+
128
+ def test_compare_scalar_interval_mixed_closed(self, op, closed, other_closed):
129
+ interval_array = IntervalArray.from_arrays(range(2), range(1, 3), closed=closed)
130
+ other = Interval(0, 1, closed=other_closed)
131
+
132
+ result = op(interval_array, other)
133
+ expected = self.elementwise_comparison(op, interval_array, other)
134
+ tm.assert_numpy_array_equal(result, expected)
135
+
136
+ def test_compare_scalar_na(self, op, interval_array, nulls_fixture, box_with_array):
137
+ box = box_with_array
138
+ obj = tm.box_expected(interval_array, box)
139
+ result = op(obj, nulls_fixture)
140
+
141
+ if nulls_fixture is pd.NA:
142
+ # GH#31882
143
+ exp = np.ones(interval_array.shape, dtype=bool)
144
+ expected = BooleanArray(exp, exp)
145
+ else:
146
+ expected = self.elementwise_comparison(op, interval_array, nulls_fixture)
147
+
148
+ if not (box is Index and nulls_fixture is pd.NA):
149
+ # don't cast expected from BooleanArray to ndarray[object]
150
+ xbox = get_upcast_box(obj, nulls_fixture, True)
151
+ expected = tm.box_expected(expected, xbox)
152
+
153
+ tm.assert_equal(result, expected)
154
+
155
+ rev = op(nulls_fixture, obj)
156
+ tm.assert_equal(rev, expected)
157
+
158
+ @pytest.mark.parametrize(
159
+ "other",
160
+ [
161
+ 0,
162
+ 1.0,
163
+ True,
164
+ "foo",
165
+ Timestamp("2017-01-01"),
166
+ Timestamp("2017-01-01", tz="US/Eastern"),
167
+ Timedelta("0 days"),
168
+ Period("2017-01-01", "D"),
169
+ ],
170
+ )
171
+ def test_compare_scalar_other(self, op, interval_array, other):
172
+ result = op(interval_array, other)
173
+ expected = self.elementwise_comparison(op, interval_array, other)
174
+ tm.assert_numpy_array_equal(result, expected)
175
+
176
+ def test_compare_list_like_interval(self, op, interval_array, interval_constructor):
177
+ # same endpoints
178
+ other = interval_constructor(interval_array.left, interval_array.right)
179
+ result = op(interval_array, other)
180
+ expected = self.elementwise_comparison(op, interval_array, other)
181
+ tm.assert_equal(result, expected)
182
+
183
+ # different endpoints
184
+ other = interval_constructor(
185
+ interval_array.left[::-1], interval_array.right[::-1]
186
+ )
187
+ result = op(interval_array, other)
188
+ expected = self.elementwise_comparison(op, interval_array, other)
189
+ tm.assert_equal(result, expected)
190
+
191
+ # all nan endpoints
192
+ other = interval_constructor([np.nan] * 4, [np.nan] * 4)
193
+ result = op(interval_array, other)
194
+ expected = self.elementwise_comparison(op, interval_array, other)
195
+ tm.assert_equal(result, expected)
196
+
197
+ def test_compare_list_like_interval_mixed_closed(
198
+ self, op, interval_constructor, closed, other_closed
199
+ ):
200
+ interval_array = IntervalArray.from_arrays(range(2), range(1, 3), closed=closed)
201
+ other = interval_constructor(range(2), range(1, 3), closed=other_closed)
202
+
203
+ result = op(interval_array, other)
204
+ expected = self.elementwise_comparison(op, interval_array, other)
205
+ tm.assert_equal(result, expected)
206
+
207
+ @pytest.mark.parametrize(
208
+ "other",
209
+ [
210
+ (
211
+ Interval(0, 1),
212
+ Interval(Timedelta("1 day"), Timedelta("2 days")),
213
+ Interval(4, 5, "both"),
214
+ Interval(10, 20, "neither"),
215
+ ),
216
+ (0, 1.5, Timestamp("20170103"), np.nan),
217
+ (
218
+ Timestamp("20170102", tz="US/Eastern"),
219
+ Timedelta("2 days"),
220
+ "baz",
221
+ pd.NaT,
222
+ ),
223
+ ],
224
+ )
225
+ def test_compare_list_like_object(self, op, interval_array, other):
226
+ result = op(interval_array, other)
227
+ expected = self.elementwise_comparison(op, interval_array, other)
228
+ tm.assert_numpy_array_equal(result, expected)
229
+
230
+ def test_compare_list_like_nan(self, op, interval_array, nulls_fixture):
231
+ other = [nulls_fixture] * 4
232
+ result = op(interval_array, other)
233
+ expected = self.elementwise_comparison(op, interval_array, other)
234
+
235
+ tm.assert_equal(result, expected)
236
+
237
+ @pytest.mark.parametrize(
238
+ "other",
239
+ [
240
+ np.arange(4, dtype="int64"),
241
+ np.arange(4, dtype="float64"),
242
+ date_range("2017-01-01", periods=4),
243
+ date_range("2017-01-01", periods=4, tz="US/Eastern"),
244
+ timedelta_range("0 days", periods=4),
245
+ period_range("2017-01-01", periods=4, freq="D"),
246
+ Categorical(list("abab")),
247
+ Categorical(date_range("2017-01-01", periods=4)),
248
+ pd.array(list("abcd")),
249
+ pd.array(["foo", 3.14, None, object()], dtype=object),
250
+ ],
251
+ ids=lambda x: str(x.dtype),
252
+ )
253
+ def test_compare_list_like_other(self, op, interval_array, other):
254
+ result = op(interval_array, other)
255
+ expected = self.elementwise_comparison(op, interval_array, other)
256
+ tm.assert_numpy_array_equal(result, expected)
257
+
258
+ @pytest.mark.parametrize("length", [1, 3, 5])
259
+ @pytest.mark.parametrize("other_constructor", [IntervalArray, list])
260
+ def test_compare_length_mismatch_errors(self, op, other_constructor, length):
261
+ interval_array = IntervalArray.from_arrays(range(4), range(1, 5))
262
+ other = other_constructor([Interval(0, 1)] * length)
263
+ with pytest.raises(ValueError, match="Lengths must match to compare"):
264
+ op(interval_array, other)
265
+
266
+ @pytest.mark.parametrize(
267
+ "constructor, expected_type, assert_func",
268
+ [
269
+ (IntervalIndex, np.array, tm.assert_numpy_array_equal),
270
+ (Series, Series, tm.assert_series_equal),
271
+ ],
272
+ )
273
+ def test_index_series_compat(self, op, constructor, expected_type, assert_func):
274
+ # IntervalIndex/Series that rely on IntervalArray for comparisons
275
+ breaks = range(4)
276
+ index = constructor(IntervalIndex.from_breaks(breaks))
277
+
278
+ # scalar comparisons
279
+ other = index[0]
280
+ result = op(index, other)
281
+ expected = expected_type(self.elementwise_comparison(op, index, other))
282
+ assert_func(result, expected)
283
+
284
+ other = breaks[0]
285
+ result = op(index, other)
286
+ expected = expected_type(self.elementwise_comparison(op, index, other))
287
+ assert_func(result, expected)
288
+
289
+ # list-like comparisons
290
+ other = IntervalArray.from_breaks(breaks)
291
+ result = op(index, other)
292
+ expected = expected_type(self.elementwise_comparison(op, index, other))
293
+ assert_func(result, expected)
294
+
295
+ other = [index[0], breaks[0], "foo"]
296
+ result = op(index, other)
297
+ expected = expected_type(self.elementwise_comparison(op, index, other))
298
+ assert_func(result, expected)
299
+
300
+ @pytest.mark.parametrize("scalars", ["a", False, 1, 1.0, None])
301
+ def test_comparison_operations(self, scalars):
302
+ # GH #28981
303
+ expected = Series([False, False])
304
+ s = Series([Interval(0, 1), Interval(1, 2)], dtype="interval")
305
+ result = s == scalars
306
+ tm.assert_series_equal(result, expected)