Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- parrot/lib/python3.10/site-packages/multidict-6.1.0.dist-info/LICENSE +13 -0
- parrot/lib/python3.10/site-packages/narwhals/__pycache__/group_by.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/__pycache__/translate.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/__pycache__/typing.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_arrow/__pycache__/dataframe.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_duckdb/__init__.py +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_duckdb/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_duckdb/__pycache__/series.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_duckdb/series.py +24 -0
- parrot/lib/python3.10/site-packages/narwhals/_ibis/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_ibis/__pycache__/series.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_ibis/series.py +24 -0
- parrot/lib/python3.10/site-packages/narwhals/_interchange/__init__.py +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_interchange/__pycache__/dataframe.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_interchange/__pycache__/series.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_interchange/dataframe.py +100 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/namespace.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/selectors.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/series.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/typing.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/dataframe.py +730 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/namespace.py +370 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/series.py +917 -0
- parrot/lib/python3.10/site-packages/narwhals/_pandas_like/typing.py +17 -0
- parrot/lib/python3.10/site-packages/narwhals/_polars/__pycache__/series.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/narwhals/_polars/dataframe.py +253 -0
- parrot/lib/python3.10/site-packages/narwhals/stable/__init__.py +3 -0
- parrot/lib/python3.10/site-packages/narwhals/stable/__pycache__/v1.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/scipy/sparse/tests/data/csc_py2.npz +3 -0
- parrot/lib/python3.10/site-packages/scipy/sparse/tests/data/csc_py3.npz +3 -0
- videollama2/lib/python3.10/site-packages/contourpy/__init__.py +285 -0
- videollama2/lib/python3.10/site-packages/contourpy/__pycache__/array.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/__pycache__/convert.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/__pycache__/typecheck.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/__pycache__/types.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/_contourpy.cpython-310-x86_64-linux-gnu.so +3 -0
- videollama2/lib/python3.10/site-packages/contourpy/array.py +261 -0
- videollama2/lib/python3.10/site-packages/contourpy/convert.py +620 -0
- videollama2/lib/python3.10/site-packages/contourpy/dechunk.py +207 -0
- videollama2/lib/python3.10/site-packages/contourpy/py.typed +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/__init__.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/_build_config.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/bokeh_renderer.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/bokeh_util.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/data.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/mpl_renderer.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/mpl_util.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/renderer.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/contourpy/util/bokeh_renderer.py +335 -0
.gitattributes
CHANGED
|
@@ -1738,3 +1738,5 @@ vllm/lib/python3.10/site-packages/cupy/_core/_kernel.cpython-310-x86_64-linux-gn
|
|
| 1738 |
vllm/lib/python3.10/site-packages/cupy/_core/_routines_math.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1739 |
vllm/lib/python3.10/site-packages/cupy/_core/_memory_range.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1740 |
vllm/lib/python3.10/site-packages/cupy/_core/_dtype.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 1738 |
vllm/lib/python3.10/site-packages/cupy/_core/_routines_math.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1739 |
vllm/lib/python3.10/site-packages/cupy/_core/_memory_range.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1740 |
vllm/lib/python3.10/site-packages/cupy/_core/_dtype.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1741 |
+
videollama2/lib/python3.10/site-packages/contourpy/_contourpy.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1742 |
+
vllm/lib/python3.10/site-packages/cupy/cuda/jitify.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
parrot/lib/python3.10/site-packages/multidict-6.1.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright 2016 Andrew Svetlov and aio-libs contributors
|
| 2 |
+
|
| 3 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
you may not use this file except in compliance with the License.
|
| 5 |
+
You may obtain a copy of the License at
|
| 6 |
+
|
| 7 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
|
| 9 |
+
Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
See the License for the specific language governing permissions and
|
| 13 |
+
limitations under the License.
|
parrot/lib/python3.10/site-packages/narwhals/__pycache__/group_by.cpython-310.pyc
ADDED
|
Binary file (5.08 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/__pycache__/translate.cpython-310.pyc
ADDED
|
Binary file (16.2 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/__pycache__/typing.cpython-310.pyc
ADDED
|
Binary file (1.93 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_arrow/__pycache__/dataframe.cpython-310.pyc
ADDED
|
Binary file (20.3 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_duckdb/__init__.py
ADDED
|
File without changes
|
parrot/lib/python3.10/site-packages/narwhals/_duckdb/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (168 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_duckdb/__pycache__/series.cpython-310.pyc
ADDED
|
Binary file (1.38 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_duckdb/series.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Any
|
| 4 |
+
|
| 5 |
+
from narwhals._duckdb.dataframe import map_duckdb_dtype_to_narwhals_dtype
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class DuckDBInterchangeSeries:
|
| 9 |
+
def __init__(self, df: Any) -> None:
|
| 10 |
+
self._native_series = df
|
| 11 |
+
|
| 12 |
+
def __narwhals_series__(self) -> Any:
|
| 13 |
+
return self
|
| 14 |
+
|
| 15 |
+
def __getattr__(self, attr: str) -> Any:
|
| 16 |
+
if attr == "dtype":
|
| 17 |
+
return map_duckdb_dtype_to_narwhals_dtype(self._native_series.types[0])
|
| 18 |
+
msg = ( # pragma: no cover
|
| 19 |
+
f"Attribute {attr} is not supported for metadata-only dataframes.\n\n"
|
| 20 |
+
"If you would like to see this kind of object better supported in "
|
| 21 |
+
"Narwhals, please open a feature request "
|
| 22 |
+
"at https://github.com/narwhals-dev/narwhals/issues."
|
| 23 |
+
)
|
| 24 |
+
raise NotImplementedError(msg) # pragma: no cover
|
parrot/lib/python3.10/site-packages/narwhals/_ibis/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (166 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_ibis/__pycache__/series.cpython-310.pyc
ADDED
|
Binary file (1.36 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_ibis/series.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Any
|
| 4 |
+
|
| 5 |
+
from narwhals._ibis.dataframe import map_ibis_dtype_to_narwhals_dtype
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class IbisInterchangeSeries:
|
| 9 |
+
def __init__(self, df: Any) -> None:
|
| 10 |
+
self._native_series = df
|
| 11 |
+
|
| 12 |
+
def __narwhals_series__(self) -> Any:
|
| 13 |
+
return self
|
| 14 |
+
|
| 15 |
+
def __getattr__(self, attr: str) -> Any:
|
| 16 |
+
if attr == "dtype":
|
| 17 |
+
return map_ibis_dtype_to_narwhals_dtype(self._native_series.type())
|
| 18 |
+
msg = (
|
| 19 |
+
f"Attribute {attr} is not supported for metadata-only dataframes.\n\n"
|
| 20 |
+
"If you would like to see this kind of object better supported in "
|
| 21 |
+
"Narwhals, please open a feature request "
|
| 22 |
+
"at https://github.com/narwhals-dev/narwhals/issues."
|
| 23 |
+
)
|
| 24 |
+
raise NotImplementedError(msg)
|
parrot/lib/python3.10/site-packages/narwhals/_interchange/__init__.py
ADDED
|
File without changes
|
parrot/lib/python3.10/site-packages/narwhals/_interchange/__pycache__/dataframe.cpython-310.pyc
ADDED
|
Binary file (3.54 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_interchange/__pycache__/series.cpython-310.pyc
ADDED
|
Binary file (1.74 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_interchange/dataframe.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import enum
|
| 4 |
+
from typing import TYPE_CHECKING
|
| 5 |
+
from typing import Any
|
| 6 |
+
from typing import NoReturn
|
| 7 |
+
|
| 8 |
+
from narwhals import dtypes
|
| 9 |
+
|
| 10 |
+
if TYPE_CHECKING:
|
| 11 |
+
from narwhals._interchange.series import InterchangeSeries
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class DtypeKind(enum.IntEnum):
|
| 15 |
+
# https://data-apis.org/dataframe-protocol/latest/API.html
|
| 16 |
+
INT = 0
|
| 17 |
+
UINT = 1
|
| 18 |
+
FLOAT = 2
|
| 19 |
+
BOOL = 20
|
| 20 |
+
STRING = 21 # UTF-8
|
| 21 |
+
DATETIME = 22
|
| 22 |
+
CATEGORICAL = 23
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def map_interchange_dtype_to_narwhals_dtype(
|
| 26 |
+
interchange_dtype: tuple[DtypeKind, int, Any, Any],
|
| 27 |
+
) -> dtypes.DType:
|
| 28 |
+
if interchange_dtype[0] == DtypeKind.INT:
|
| 29 |
+
if interchange_dtype[1] == 64:
|
| 30 |
+
return dtypes.Int64()
|
| 31 |
+
if interchange_dtype[1] == 32:
|
| 32 |
+
return dtypes.Int32()
|
| 33 |
+
if interchange_dtype[1] == 16:
|
| 34 |
+
return dtypes.Int16()
|
| 35 |
+
if interchange_dtype[1] == 8:
|
| 36 |
+
return dtypes.Int8()
|
| 37 |
+
msg = "Invalid bit width for INT" # pragma: no cover
|
| 38 |
+
raise AssertionError(msg)
|
| 39 |
+
if interchange_dtype[0] == DtypeKind.UINT:
|
| 40 |
+
if interchange_dtype[1] == 64:
|
| 41 |
+
return dtypes.UInt64()
|
| 42 |
+
if interchange_dtype[1] == 32:
|
| 43 |
+
return dtypes.UInt32()
|
| 44 |
+
if interchange_dtype[1] == 16:
|
| 45 |
+
return dtypes.UInt16()
|
| 46 |
+
if interchange_dtype[1] == 8:
|
| 47 |
+
return dtypes.UInt8()
|
| 48 |
+
msg = "Invalid bit width for UINT" # pragma: no cover
|
| 49 |
+
raise AssertionError(msg)
|
| 50 |
+
if interchange_dtype[0] == DtypeKind.FLOAT:
|
| 51 |
+
if interchange_dtype[1] == 64:
|
| 52 |
+
return dtypes.Float64()
|
| 53 |
+
if interchange_dtype[1] == 32:
|
| 54 |
+
return dtypes.Float32()
|
| 55 |
+
msg = "Invalid bit width for FLOAT" # pragma: no cover
|
| 56 |
+
raise AssertionError(msg)
|
| 57 |
+
if interchange_dtype[0] == DtypeKind.BOOL:
|
| 58 |
+
return dtypes.Boolean()
|
| 59 |
+
if interchange_dtype[0] == DtypeKind.STRING:
|
| 60 |
+
return dtypes.String()
|
| 61 |
+
if interchange_dtype[0] == DtypeKind.DATETIME:
|
| 62 |
+
return dtypes.Datetime()
|
| 63 |
+
if interchange_dtype[0] == DtypeKind.CATEGORICAL: # pragma: no cover
|
| 64 |
+
# upstream issue: https://github.com/ibis-project/ibis/issues/9570
|
| 65 |
+
return dtypes.Categorical()
|
| 66 |
+
msg = f"Invalid dtype, got: {interchange_dtype}" # pragma: no cover
|
| 67 |
+
raise AssertionError(msg)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class InterchangeFrame:
|
| 71 |
+
def __init__(self, df: Any) -> None:
|
| 72 |
+
self._native_frame = df
|
| 73 |
+
self._interchange_frame = df.__dataframe__()
|
| 74 |
+
|
| 75 |
+
def __narwhals_dataframe__(self) -> Any:
|
| 76 |
+
return self
|
| 77 |
+
|
| 78 |
+
def __getitem__(self, item: str) -> InterchangeSeries:
|
| 79 |
+
from narwhals._interchange.series import InterchangeSeries
|
| 80 |
+
|
| 81 |
+
return InterchangeSeries(self._interchange_frame.get_column_by_name(item))
|
| 82 |
+
|
| 83 |
+
@property
|
| 84 |
+
def schema(self) -> dict[str, dtypes.DType]:
|
| 85 |
+
return {
|
| 86 |
+
column_name: map_interchange_dtype_to_narwhals_dtype(
|
| 87 |
+
self._interchange_frame.get_column_by_name(column_name).dtype
|
| 88 |
+
)
|
| 89 |
+
for column_name in self._interchange_frame.column_names()
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
def __getattr__(self, attr: str) -> NoReturn:
|
| 93 |
+
msg = (
|
| 94 |
+
f"Attribute {attr} is not supported for metadata-only dataframes.\n\n"
|
| 95 |
+
"Hint: you probably called `nw.from_native` on an object which isn't fully "
|
| 96 |
+
"supported by Narwhals, yet implements `__dataframe__`. If you would like to "
|
| 97 |
+
"see this kind of object supported in Narwhals, please open a feature request "
|
| 98 |
+
"at https://github.com/narwhals-dev/narwhals/issues."
|
| 99 |
+
)
|
| 100 |
+
raise NotImplementedError(msg)
|
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/namespace.cpython-310.pyc
ADDED
|
Binary file (11.5 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/selectors.cpython-310.pyc
ADDED
|
Binary file (6.63 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/series.cpython-310.pyc
ADDED
|
Binary file (29.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/__pycache__/typing.cpython-310.pyc
ADDED
|
Binary file (633 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/dataframe.py
ADDED
|
@@ -0,0 +1,730 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING
|
| 4 |
+
from typing import Any
|
| 5 |
+
from typing import Iterable
|
| 6 |
+
from typing import Iterator
|
| 7 |
+
from typing import Literal
|
| 8 |
+
from typing import Sequence
|
| 9 |
+
from typing import overload
|
| 10 |
+
|
| 11 |
+
from narwhals._expression_parsing import evaluate_into_exprs
|
| 12 |
+
from narwhals._pandas_like.expr import PandasLikeExpr
|
| 13 |
+
from narwhals._pandas_like.utils import broadcast_series
|
| 14 |
+
from narwhals._pandas_like.utils import convert_str_slice_to_int_slice
|
| 15 |
+
from narwhals._pandas_like.utils import create_native_series
|
| 16 |
+
from narwhals._pandas_like.utils import horizontal_concat
|
| 17 |
+
from narwhals._pandas_like.utils import translate_dtype
|
| 18 |
+
from narwhals._pandas_like.utils import validate_dataframe_comparand
|
| 19 |
+
from narwhals.dependencies import get_cudf
|
| 20 |
+
from narwhals.dependencies import get_modin
|
| 21 |
+
from narwhals.dependencies import get_pandas
|
| 22 |
+
from narwhals.dependencies import is_numpy_array
|
| 23 |
+
from narwhals.utils import Implementation
|
| 24 |
+
from narwhals.utils import flatten
|
| 25 |
+
from narwhals.utils import generate_unique_token
|
| 26 |
+
from narwhals.utils import is_sequence_but_not_str
|
| 27 |
+
from narwhals.utils import parse_columns_to_drop
|
| 28 |
+
|
| 29 |
+
if TYPE_CHECKING:
|
| 30 |
+
import numpy as np
|
| 31 |
+
import pandas as pd
|
| 32 |
+
from typing_extensions import Self
|
| 33 |
+
|
| 34 |
+
from narwhals._pandas_like.group_by import PandasLikeGroupBy
|
| 35 |
+
from narwhals._pandas_like.namespace import PandasLikeNamespace
|
| 36 |
+
from narwhals._pandas_like.series import PandasLikeSeries
|
| 37 |
+
from narwhals._pandas_like.typing import IntoPandasLikeExpr
|
| 38 |
+
from narwhals.dtypes import DType
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class PandasLikeDataFrame:
|
| 42 |
+
# --- not in the spec ---
|
| 43 |
+
def __init__(
|
| 44 |
+
self,
|
| 45 |
+
native_dataframe: Any,
|
| 46 |
+
*,
|
| 47 |
+
implementation: Implementation,
|
| 48 |
+
backend_version: tuple[int, ...],
|
| 49 |
+
) -> None:
|
| 50 |
+
self._validate_columns(native_dataframe.columns)
|
| 51 |
+
self._native_frame = native_dataframe
|
| 52 |
+
self._implementation = implementation
|
| 53 |
+
self._backend_version = backend_version
|
| 54 |
+
|
| 55 |
+
def __narwhals_dataframe__(self) -> Self:
|
| 56 |
+
return self
|
| 57 |
+
|
| 58 |
+
def __narwhals_lazyframe__(self) -> Self:
|
| 59 |
+
return self
|
| 60 |
+
|
| 61 |
+
def __narwhals_namespace__(self) -> PandasLikeNamespace:
|
| 62 |
+
from narwhals._pandas_like.namespace import PandasLikeNamespace
|
| 63 |
+
|
| 64 |
+
return PandasLikeNamespace(self._implementation, self._backend_version)
|
| 65 |
+
|
| 66 |
+
def __native_namespace__(self) -> Any:
|
| 67 |
+
if self._implementation is Implementation.PANDAS:
|
| 68 |
+
return get_pandas()
|
| 69 |
+
if self._implementation is Implementation.MODIN: # pragma: no cover
|
| 70 |
+
return get_modin()
|
| 71 |
+
if self._implementation is Implementation.CUDF: # pragma: no cover
|
| 72 |
+
return get_cudf()
|
| 73 |
+
msg = f"Expected pandas/modin/cudf, got: {type(self._implementation)}" # pragma: no cover
|
| 74 |
+
raise AssertionError(msg)
|
| 75 |
+
|
| 76 |
+
def __len__(self) -> int:
|
| 77 |
+
return len(self._native_frame)
|
| 78 |
+
|
| 79 |
+
def _validate_columns(self, columns: pd.Index) -> None:
|
| 80 |
+
try:
|
| 81 |
+
len_unique_columns = len(columns.drop_duplicates())
|
| 82 |
+
except Exception: # noqa: BLE001 # pragma: no cover
|
| 83 |
+
msg = f"Expected hashable (e.g. str or int) column names, got: {columns}"
|
| 84 |
+
raise ValueError(msg) from None
|
| 85 |
+
|
| 86 |
+
if len(columns) != len_unique_columns:
|
| 87 |
+
msg = f"Expected unique column names, got: {columns}"
|
| 88 |
+
raise ValueError(msg)
|
| 89 |
+
|
| 90 |
+
def _from_native_frame(self, df: Any) -> Self:
|
| 91 |
+
return self.__class__(
|
| 92 |
+
df,
|
| 93 |
+
implementation=self._implementation,
|
| 94 |
+
backend_version=self._backend_version,
|
| 95 |
+
)
|
| 96 |
+
|
| 97 |
+
def get_column(self, name: str) -> PandasLikeSeries:
|
| 98 |
+
from narwhals._pandas_like.series import PandasLikeSeries
|
| 99 |
+
|
| 100 |
+
return PandasLikeSeries(
|
| 101 |
+
self._native_frame.loc[:, name],
|
| 102 |
+
implementation=self._implementation,
|
| 103 |
+
backend_version=self._backend_version,
|
| 104 |
+
)
|
| 105 |
+
|
| 106 |
+
def __array__(self, dtype: Any = None, copy: bool | None = None) -> np.ndarray:
|
| 107 |
+
return self.to_numpy(dtype=dtype, copy=copy)
|
| 108 |
+
|
| 109 |
+
@overload
|
| 110 |
+
def __getitem__(self, item: tuple[Sequence[int], str | int]) -> PandasLikeSeries: ... # type: ignore[overload-overlap]
|
| 111 |
+
|
| 112 |
+
@overload
|
| 113 |
+
def __getitem__(self, item: Sequence[int]) -> PandasLikeDataFrame: ...
|
| 114 |
+
|
| 115 |
+
@overload
|
| 116 |
+
def __getitem__(self, item: str) -> PandasLikeSeries: ... # type: ignore[overload-overlap]
|
| 117 |
+
|
| 118 |
+
@overload
|
| 119 |
+
def __getitem__(self, item: Sequence[str]) -> PandasLikeDataFrame: ...
|
| 120 |
+
|
| 121 |
+
@overload
|
| 122 |
+
def __getitem__(self, item: slice) -> PandasLikeDataFrame: ...
|
| 123 |
+
|
| 124 |
+
@overload
|
| 125 |
+
def __getitem__(self, item: tuple[slice, slice]) -> Self: ...
|
| 126 |
+
|
| 127 |
+
@overload
|
| 128 |
+
def __getitem__(
|
| 129 |
+
self, item: tuple[Sequence[int], Sequence[int] | slice]
|
| 130 |
+
) -> PandasLikeDataFrame: ...
|
| 131 |
+
|
| 132 |
+
@overload
|
| 133 |
+
def __getitem__(self, item: tuple[slice, Sequence[int]]) -> PandasLikeDataFrame: ...
|
| 134 |
+
|
| 135 |
+
def __getitem__(
|
| 136 |
+
self,
|
| 137 |
+
item: str
|
| 138 |
+
| int
|
| 139 |
+
| slice
|
| 140 |
+
| Sequence[int]
|
| 141 |
+
| Sequence[str]
|
| 142 |
+
| tuple[Sequence[int], str | int]
|
| 143 |
+
| tuple[slice | Sequence[int], Sequence[int] | slice]
|
| 144 |
+
| tuple[slice, slice],
|
| 145 |
+
) -> PandasLikeSeries | PandasLikeDataFrame:
|
| 146 |
+
if isinstance(item, tuple):
|
| 147 |
+
item = tuple(list(i) if is_sequence_but_not_str(i) else i for i in item)
|
| 148 |
+
|
| 149 |
+
if isinstance(item, str):
|
| 150 |
+
from narwhals._pandas_like.series import PandasLikeSeries
|
| 151 |
+
|
| 152 |
+
return PandasLikeSeries(
|
| 153 |
+
self._native_frame.loc[:, item],
|
| 154 |
+
implementation=self._implementation,
|
| 155 |
+
backend_version=self._backend_version,
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
elif (
|
| 159 |
+
isinstance(item, tuple)
|
| 160 |
+
and len(item) == 2
|
| 161 |
+
and is_sequence_but_not_str(item[1])
|
| 162 |
+
):
|
| 163 |
+
if len(item[1]) == 0:
|
| 164 |
+
# Return empty dataframe
|
| 165 |
+
return self._from_native_frame(self._native_frame.__class__())
|
| 166 |
+
if all(isinstance(x, int) for x in item[1]):
|
| 167 |
+
return self._from_native_frame(self._native_frame.iloc[item])
|
| 168 |
+
if all(isinstance(x, str) for x in item[1]):
|
| 169 |
+
indexer = (
|
| 170 |
+
item[0],
|
| 171 |
+
self._native_frame.columns.get_indexer(item[1]),
|
| 172 |
+
)
|
| 173 |
+
return self._from_native_frame(self._native_frame.iloc[indexer])
|
| 174 |
+
msg = (
|
| 175 |
+
f"Expected sequence str or int, got: {type(item[1])}" # pragma: no cover
|
| 176 |
+
)
|
| 177 |
+
raise TypeError(msg) # pragma: no cover
|
| 178 |
+
|
| 179 |
+
elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], slice):
|
| 180 |
+
columns = self._native_frame.columns
|
| 181 |
+
if isinstance(item[1].start, str) or isinstance(item[1].stop, str):
|
| 182 |
+
start, stop, step = convert_str_slice_to_int_slice(item[1], columns)
|
| 183 |
+
return self._from_native_frame(
|
| 184 |
+
self._native_frame.iloc[item[0], slice(start, stop, step)]
|
| 185 |
+
)
|
| 186 |
+
if isinstance(item[1].start, int) or isinstance(item[1].stop, int):
|
| 187 |
+
return self._from_native_frame(
|
| 188 |
+
self._native_frame.iloc[
|
| 189 |
+
item[0], slice(item[1].start, item[1].stop, item[1].step)
|
| 190 |
+
]
|
| 191 |
+
)
|
| 192 |
+
msg = f"Expected slice of integers or strings, got: {type(item[1])}" # pragma: no cover
|
| 193 |
+
raise TypeError(msg) # pragma: no cover
|
| 194 |
+
|
| 195 |
+
elif isinstance(item, tuple) and len(item) == 2:
|
| 196 |
+
from narwhals._pandas_like.series import PandasLikeSeries
|
| 197 |
+
|
| 198 |
+
if isinstance(item[1], str):
|
| 199 |
+
item = (item[0], self._native_frame.columns.get_loc(item[1])) # type: ignore[assignment]
|
| 200 |
+
native_series = self._native_frame.iloc[item]
|
| 201 |
+
elif isinstance(item[1], int):
|
| 202 |
+
native_series = self._native_frame.iloc[item]
|
| 203 |
+
else: # pragma: no cover
|
| 204 |
+
msg = f"Expected str or int, got: {type(item[1])}"
|
| 205 |
+
raise TypeError(msg)
|
| 206 |
+
|
| 207 |
+
return PandasLikeSeries(
|
| 208 |
+
native_series,
|
| 209 |
+
implementation=self._implementation,
|
| 210 |
+
backend_version=self._backend_version,
|
| 211 |
+
)
|
| 212 |
+
|
| 213 |
+
elif is_sequence_but_not_str(item) or (is_numpy_array(item) and item.ndim == 1):
|
| 214 |
+
if all(isinstance(x, str) for x in item) and len(item) > 0:
|
| 215 |
+
return self._from_native_frame(self._native_frame.loc[:, item])
|
| 216 |
+
return self._from_native_frame(self._native_frame.iloc[item])
|
| 217 |
+
|
| 218 |
+
elif isinstance(item, slice):
|
| 219 |
+
if isinstance(item.start, str) or isinstance(item.stop, str):
|
| 220 |
+
start, stop, step = convert_str_slice_to_int_slice(
|
| 221 |
+
item, self._native_frame.columns
|
| 222 |
+
)
|
| 223 |
+
return self._from_native_frame(
|
| 224 |
+
self._native_frame.iloc[:, slice(start, stop, step)]
|
| 225 |
+
)
|
| 226 |
+
return self._from_native_frame(self._native_frame.iloc[item])
|
| 227 |
+
|
| 228 |
+
else: # pragma: no cover
|
| 229 |
+
msg = f"Expected str or slice, got: {type(item)}"
|
| 230 |
+
raise TypeError(msg)
|
| 231 |
+
|
| 232 |
+
# --- properties ---
|
| 233 |
+
@property
|
| 234 |
+
def columns(self) -> list[str]:
|
| 235 |
+
return self._native_frame.columns.tolist() # type: ignore[no-any-return]
|
| 236 |
+
|
| 237 |
+
def rows(
|
| 238 |
+
self, *, named: bool = False
|
| 239 |
+
) -> list[tuple[Any, ...]] | list[dict[str, Any]]:
|
| 240 |
+
if not named:
|
| 241 |
+
return list(self._native_frame.itertuples(index=False, name=None))
|
| 242 |
+
|
| 243 |
+
return self._native_frame.to_dict(orient="records") # type: ignore[no-any-return]
|
| 244 |
+
|
| 245 |
+
def iter_rows(
|
| 246 |
+
self,
|
| 247 |
+
*,
|
| 248 |
+
named: bool = False,
|
| 249 |
+
buffer_size: int = 512,
|
| 250 |
+
) -> Iterator[list[tuple[Any, ...]]] | Iterator[list[dict[str, Any]]]:
|
| 251 |
+
"""
|
| 252 |
+
NOTE:
|
| 253 |
+
The param ``buffer_size`` is only here for compatibility with the polars API
|
| 254 |
+
and has no effect on the output.
|
| 255 |
+
"""
|
| 256 |
+
if not named:
|
| 257 |
+
yield from self._native_frame.itertuples(index=False, name=None)
|
| 258 |
+
else:
|
| 259 |
+
col_names = self._native_frame.columns
|
| 260 |
+
yield from (
|
| 261 |
+
dict(zip(col_names, row))
|
| 262 |
+
for row in self._native_frame.itertuples(index=False)
|
| 263 |
+
) # type: ignore[misc]
|
| 264 |
+
|
| 265 |
+
@property
|
| 266 |
+
def schema(self) -> dict[str, DType]:
|
| 267 |
+
return {
|
| 268 |
+
col: translate_dtype(self._native_frame.loc[:, col])
|
| 269 |
+
for col in self._native_frame.columns
|
| 270 |
+
}
|
| 271 |
+
|
| 272 |
+
def collect_schema(self) -> dict[str, DType]:
    """Eagerly resolve the schema (identical to `schema` for this eager backend)."""
    return self.schema
| 274 |
+
|
| 275 |
+
# --- reshape ---
def select(
    self,
    *exprs: IntoPandasLikeExpr,
    **named_exprs: IntoPandasLikeExpr,
) -> Self:
    """Project the frame down to the given expressions / column names."""
    if exprs and all(isinstance(x, str) for x in exprs) and not named_exprs:
        # This is a simple slice => fastpath!
        return self._from_native_frame(self._native_frame.loc[:, list(exprs)])
    new_series = evaluate_into_exprs(self, *exprs, **named_exprs)
    if not new_series:
        # return empty dataframe, like Polars does
        return self._from_native_frame(self._native_frame.__class__())
    # Align series lengths (e.g. broadcast scalar results) before concatenating.
    new_series = broadcast_series(new_series)
    df = horizontal_concat(
        new_series,
        implementation=self._implementation,
        backend_version=self._backend_version,
    )
    return self._from_native_frame(df)
| 295 |
+
|
| 296 |
+
def drop_nulls(self, subset: str | list[str] | None) -> Self:
    """Drop rows containing nulls, optionally restricted to `subset` columns."""
    if subset is None:
        return self._from_native_frame(self._native_frame.dropna(axis=0))
    subset = [subset] if isinstance(subset, str) else subset
    plx = self.__narwhals_namespace__()
    # Keep rows where none of the subset columns is null.
    return self.filter(~plx.any_horizontal(plx.col(*subset).is_null()))
| 302 |
+
|
| 303 |
+
def with_row_index(self, name: str) -> Self:
    """Prepend a 0-based row-index column called `name`."""
    row_index = create_native_series(
        range(len(self._native_frame)),
        index=self._native_frame.index,  # align with the existing index
        implementation=self._implementation,
        backend_version=self._backend_version,
    ).alias(name)
    return self._from_native_frame(
        horizontal_concat(
            [row_index._native_series, self._native_frame],
            implementation=self._implementation,
            backend_version=self._backend_version,
        )
    )
| 317 |
+
|
| 318 |
+
def row(self, row: int) -> tuple[Any, ...]:
    """Return the row at positional index `row` as a tuple of values."""
    # `tuple(iterable)` directly — the original's pass-through generator
    # (`tuple(x for x in ...)`) added nothing.
    return tuple(self._native_frame.iloc[row])
| 320 |
+
|
| 321 |
+
def filter(
    self,
    *predicates: IntoPandasLikeExpr,
) -> Self:
    """Keep only the rows where every predicate evaluates to True."""
    plx = self.__narwhals_namespace__()
    if (
        len(predicates) == 1
        and isinstance(predicates[0], list)
        and all(isinstance(x, bool) for x in predicates[0])
    ):
        # A single plain list of bools is used directly as the row mask.
        _mask = predicates[0]
    else:
        expr = plx.all_horizontal(*predicates)
        # Safety: all_horizontal's expression only returns a single column.
        mask = expr._call(self)[0]
        _mask = validate_dataframe_comparand(self._native_frame.index, mask)
    return self._from_native_frame(self._native_frame.loc[_mask])
| 338 |
+
|
| 339 |
+
def with_columns(
    self,
    *exprs: IntoPandasLikeExpr,
    **named_exprs: IntoPandasLikeExpr,
) -> Self:
    """Return a frame with the given columns added or replaced in place."""
    index = self._native_frame.index
    new_columns = evaluate_into_exprs(self, *exprs, **named_exprs)

    if not new_columns and len(self) == 0:
        return self

    # If the inputs are all Expressions which return full columns
    # (as opposed to scalars), we can use a fast path (concat, instead of assign).
    # We can't use the fastpath if any input is not an expression (e.g.
    # if it's a Series) because then we might be changing its flags.
    # See `test_memmap` for an example of where this is necessary.
    fast_path = (
        all(len(s) > 1 for s in new_columns)
        and all(isinstance(x, PandasLikeExpr) for x in exprs)
        and all(isinstance(x, PandasLikeExpr) for (_, x) in named_exprs.items())
    )

    if fast_path:
        new_column_name_to_new_column_map = {s.name: s for s in new_columns}
        to_concat = []
        # Make sure to preserve column order
        for name in self._native_frame.columns:
            if name in new_column_name_to_new_column_map:
                # Replacement for an existing column: pop it so only
                # genuinely new columns remain in the map afterwards.
                to_concat.append(
                    validate_dataframe_comparand(
                        index, new_column_name_to_new_column_map.pop(name)
                    )
                )
            else:
                to_concat.append(self._native_frame.loc[:, name])
        # Whatever is left in the map is a brand-new column; append at the end.
        to_concat.extend(
            validate_dataframe_comparand(index, new_column_name_to_new_column_map[s])
            for s in new_column_name_to_new_column_map
        )

        df = horizontal_concat(
            to_concat,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )
    else:
        # Slow path: shallow-copy, then assign column by column.
        df = self._native_frame.copy(deep=False)
        for s in new_columns:
            df[s.name] = validate_dataframe_comparand(index, s)
    return self._from_native_frame(df)
| 389 |
+
|
| 390 |
+
def rename(self, mapping: dict[str, str]) -> Self:
    """Rename columns according to `mapping` (old name -> new name)."""
    return self._from_native_frame(self._native_frame.rename(columns=mapping))
| 392 |
+
|
| 393 |
+
def drop(self: Self, columns: list[str], strict: bool) -> Self:  # noqa: FBT001
    """Drop the given columns.

    Handling of `strict` (presumably erroring on unknown names) is
    delegated to `parse_columns_to_drop` — verify against that helper.
    """
    to_drop = parse_columns_to_drop(
        compliant_frame=self, columns=columns, strict=strict
    )
    return self._from_native_frame(self._native_frame.drop(columns=to_drop))
| 398 |
+
|
| 399 |
+
# --- transform ---
def sort(
    self,
    by: str | Iterable[str],
    *more_by: str,
    descending: bool | Sequence[bool] = False,
) -> Self:
    """Sort rows by one or more key columns.

    `descending` may be a single flag applied to all keys, or one flag
    per key.
    """
    keys = flatten([*flatten([by]), *more_by])
    # pandas speaks "ascending", so invert the polars-style flag(s).
    ascending: bool | list[bool]
    if isinstance(descending, bool):
        ascending = not descending
    else:
        ascending = [not flag for flag in descending]
    return self._from_native_frame(
        self._native_frame.sort_values(keys, ascending=ascending)
    )
| 413 |
+
|
| 414 |
+
# --- convert ---
def collect(self) -> PandasLikeDataFrame:
    """Materialise as an (eager) PandasLikeDataFrame — data is already in memory."""
    return PandasLikeDataFrame(
        self._native_frame,
        implementation=self._implementation,
        backend_version=self._backend_version,
    )
| 421 |
+
|
| 422 |
+
# --- actions ---
def group_by(self, *keys: str) -> PandasLikeGroupBy:
    """Start a group-by over the given key columns."""
    # Local import — presumably avoids a circular import at module load.
    from narwhals._pandas_like.group_by import PandasLikeGroupBy

    return PandasLikeGroupBy(
        self,
        list(keys),
    )
| 430 |
+
|
| 431 |
+
def join(
    self,
    other: Self,
    *,
    how: Literal["left", "inner", "outer", "cross", "anti", "semi"] = "inner",
    left_on: str | list[str] | None,
    right_on: str | list[str] | None,
    suffix: str,
) -> Self:
    """Join `other` onto this frame with polars-style semantics.

    `suffix` is appended to clashing right-hand column names. "cross",
    "anti" and "semi" are emulated on top of pandas' `merge`; the
    remaining strategies are passed through directly.
    """
    if isinstance(left_on, str):
        left_on = [left_on]
    if isinstance(right_on, str):
        right_on = [right_on]
    if how == "cross":
        if (
            self._implementation is Implementation.MODIN
            or self._implementation is Implementation.CUDF
        ) or (
            self._implementation is Implementation.PANDAS
            and self._backend_version < (1, 4)
        ):
            # Backend lacks `how="cross"`: emulate it by joining on a
            # throwaway constant-key column, then dropping it.
            key_token = generate_unique_token(
                n_bytes=8, columns=[*self.columns, *other.columns]
            )

            return self._from_native_frame(
                self._native_frame.assign(**{key_token: 0})
                .merge(
                    other._native_frame.assign(**{key_token: 0}),
                    how="inner",
                    left_on=key_token,
                    right_on=key_token,
                    suffixes=("", suffix),
                )
                .drop(columns=key_token),
            )
        else:
            return self._from_native_frame(
                self._native_frame.merge(
                    other._native_frame,
                    how="cross",
                    suffixes=("", suffix),
                ),
            )

    if how == "anti":
        # Outer-join with an indicator column, then keep only rows that
        # appeared solely on the left side.
        indicator_token = generate_unique_token(
            n_bytes=8, columns=[*self.columns, *other.columns]
        )

        other_native = (
            other._native_frame.loc[:, right_on]
            .rename(  # rename to avoid creating extra columns in join
                columns=dict(zip(right_on, left_on))  # type: ignore[arg-type]
            )
            .drop_duplicates()
        )
        return self._from_native_frame(
            self._native_frame.merge(
                other_native,
                how="outer",
                indicator=indicator_token,
                left_on=left_on,
                right_on=left_on,
            )
            .loc[lambda t: t[indicator_token] == "left_only"]
            .drop(columns=indicator_token)
        )

    if how == "semi":
        other_native = (
            other._native_frame.loc[:, right_on]
            .rename(  # rename to avoid creating extra columns in join
                columns=dict(zip(right_on, left_on))  # type: ignore[arg-type]
            )
            .drop_duplicates()  # avoids potential rows duplication from inner join
        )
        return self._from_native_frame(
            self._native_frame.merge(
                other_native,
                how="inner",
                left_on=left_on,
                right_on=left_on,
            )
        )

    if how == "left":
        other_native = other._native_frame
        result_native = self._native_frame.merge(
            other_native,
            how="left",
            left_on=left_on,
            right_on=right_on,
            suffixes=("", suffix),
        )
        # Collect the duplicated right-hand key columns that pandas keeps
        # around after the merge, so they can be dropped.
        extra = []
        for left_key, right_key in zip(left_on, right_on):  # type: ignore[arg-type]
            if right_key != left_key and right_key not in self.columns:
                extra.append(right_key)
            elif right_key != left_key:
                extra.append(f"{right_key}{suffix}")
        return self._from_native_frame(result_native.drop(columns=extra))

    # "inner"/"outer": direct pass-through to the native merge.
    return self._from_native_frame(
        self._native_frame.merge(
            other._native_frame,
            left_on=left_on,
            right_on=right_on,
            how=how,
            suffixes=("", suffix),
        ),
    )
| 543 |
+
|
| 544 |
+
def join_asof(
    self,
    other: Self,
    *,
    left_on: str | None = None,
    right_on: str | None = None,
    on: str | None = None,
    by_left: str | list[str] | None = None,
    by_right: str | list[str] | None = None,
    by: str | list[str] | None = None,
    strategy: Literal["backward", "forward", "nearest"] = "backward",
) -> Self:
    """As-of (nearest-key) join, delegated to the native `merge_asof`."""
    plx = self.__native_namespace__()
    return self._from_native_frame(
        plx.merge_asof(
            self._native_frame,
            other._native_frame,
            left_on=left_on,
            right_on=right_on,
            on=on,
            left_by=by_left,
            right_by=by_right,
            by=by,
            direction=strategy,
            suffixes=("", "_right"),
        ),
    )
| 571 |
+
|
| 572 |
+
# --- partial reduction ---

def head(self, n: int) -> Self:
    """Return the first `n` rows."""
    native = self._native_frame.head(n)
    return self._from_native_frame(native)
| 576 |
+
|
| 577 |
+
def tail(self, n: int) -> Self:
    """Return the last `n` rows."""
    native = self._native_frame.tail(n)
    return self._from_native_frame(native)
| 579 |
+
|
| 580 |
+
def unique(
    self: Self,
    subset: str | list[str] | None,
    *,
    keep: Literal["any", "first", "last", "none"] = "any",
    maintain_order: bool = False,
) -> Self:
    """Drop duplicate rows, optionally considering only `subset` columns.

    NOTE:
        The param `maintain_order` is only here for compatibility with the polars API
        and has no effect on the output.
    """
    # Map polars-style `keep` onto pandas' drop_duplicates vocabulary:
    # "none" -> False (drop every duplicate), "any" -> "first".
    mapped_keep = {"none": False, "any": "first"}.get(keep, keep)
    subset = flatten(subset) if subset else None
    return self._from_native_frame(
        self._native_frame.drop_duplicates(subset=subset, keep=mapped_keep)
    )
| 597 |
+
|
| 598 |
+
# --- lazy-only ---
def lazy(self) -> Self:
    """No-op: this eager frame doubles as its own lazy counterpart."""
    return self
| 601 |
+
|
| 602 |
+
@property
def shape(self) -> tuple[int, int]:
    """(n_rows, n_columns) of the native frame."""
    return self._native_frame.shape  # type: ignore[no-any-return]
| 605 |
+
|
| 606 |
+
def to_dict(self, *, as_series: bool = False) -> dict[str, Any]:
    """Return a column-name -> values mapping.

    With `as_series=True` the values are PandasLikeSeries; otherwise
    they are plain lists.
    """
    from narwhals._pandas_like.series import PandasLikeSeries

    if as_series:
        # TODO(Unassigned): should this return narwhals series?
        return {
            col: PandasLikeSeries(
                self._native_frame.loc[:, col],
                implementation=self._implementation,
                backend_version=self._backend_version,
            )
            for col in self.columns
        }
    return self._native_frame.to_dict(orient="list")  # type: ignore[no-any-return]
| 620 |
+
|
| 621 |
+
def to_numpy(self, dtype: Any = None, copy: bool | None = None) -> Any:
    """Convert to a NumPy array, working around nullable-dtype quirks."""
    from narwhals._pandas_like.series import PANDAS_TO_NUMPY_DTYPE_MISSING

    if copy is None:
        # pandas default differs from Polars, but cuDF default is True
        copy = self._implementation is Implementation.CUDF

    if dtype is not None:
        return self._native_frame.to_numpy(dtype=dtype, copy=copy)

    # pandas return `object` dtype for nullable dtypes if dtype=None,
    # so we cast each Series to numpy and let numpy find a common dtype.
    # If there aren't any dtypes where `to_numpy()` is "broken" (i.e. it
    # returns Object) then we just call `to_numpy()` on the DataFrame.
    for col_dtype in self._native_frame.dtypes:
        if str(col_dtype) in PANDAS_TO_NUMPY_DTYPE_MISSING:
            import numpy as np  # ignore-banned-import

            # Stack per-column arrays side by side as (n_rows, n_cols).
            return np.hstack(
                [self[col].to_numpy(copy=copy)[:, None] for col in self.columns]
            )
    return self._native_frame.to_numpy(copy=copy)
| 643 |
+
|
| 644 |
+
def to_pandas(self) -> Any:
    """Return the data as a genuine pandas DataFrame (no copy for pandas itself)."""
    if self._implementation is Implementation.PANDAS:
        return self._native_frame
    if self._implementation is Implementation.MODIN:  # pragma: no cover
        return self._native_frame._to_pandas()
    return self._native_frame.to_pandas()  # pragma: no cover
| 650 |
+
|
| 651 |
+
def write_parquet(self, file: Any) -> Any:
    """Write the frame to `file` in Parquet format (returns None)."""
    self._native_frame.to_parquet(file)
| 653 |
+
|
| 654 |
+
def write_csv(self, file: Any = None) -> Any:
    """Write the frame as CSV without the index.

    When `file` is None the CSV payload is returned as a string
    (pandas' behaviour); otherwise it is written to `file`.
    """
    frame = self._native_frame
    return frame.to_csv(file, index=False)
| 656 |
+
|
| 657 |
+
# --- descriptive ---
def is_duplicated(self: Self) -> PandasLikeSeries:
    """Boolean series: True for every row that appears more than once."""
    from narwhals._pandas_like.series import PandasLikeSeries

    return PandasLikeSeries(
        self._native_frame.duplicated(keep=False),
        implementation=self._implementation,
        backend_version=self._backend_version,
    )
| 666 |
+
|
| 667 |
+
def is_empty(self: Self) -> bool:
    """True when the native frame is empty (pandas' `.empty`)."""
    return self._native_frame.empty  # type: ignore[no-any-return]
| 669 |
+
|
| 670 |
+
def is_unique(self: Self) -> PandasLikeSeries:
    """Boolean series: True for rows that appear exactly once."""
    from narwhals._pandas_like.series import PandasLikeSeries

    return PandasLikeSeries(
        ~self._native_frame.duplicated(keep=False),
        implementation=self._implementation,
        backend_version=self._backend_version,
    )
| 678 |
+
|
| 679 |
+
def null_count(self: Self) -> PandasLikeDataFrame:
    """One-row frame with the per-column null counts."""
    return PandasLikeDataFrame(
        # Column-wise null counts come back as a Series; transpose to one row.
        self._native_frame.isna().sum(axis=0).to_frame().transpose(),
        implementation=self._implementation,
        backend_version=self._backend_version,
    )
| 685 |
+
|
| 686 |
+
def item(self: Self, row: int | None = None, column: int | str | None = None) -> Any:
    """Return a single scalar element.

    With no arguments the frame must be exactly 1x1; otherwise both
    `row` and `column` (name or position) must be given.
    """
    no_row = row is None
    no_col = column is None
    if no_row and no_col:
        if self.shape != (1, 1):
            msg = (
                "can only call `.item()` if the dataframe is of shape (1, 1),"
                " or if explicit row/col values are provided;"
                f" frame has shape {self.shape!r}"
            )
            raise ValueError(msg)
        return self._native_frame.iloc[0, 0]

    if no_row or no_col:
        msg = "cannot call `.item()` with only one of `row` or `column`"
        raise ValueError(msg)

    col_index = self.columns.index(column) if isinstance(column, str) else column
    return self._native_frame.iloc[row, col_index]
| 703 |
+
|
| 704 |
+
def clone(self: Self) -> Self:
    """Copy the frame (deep copy, pandas' `.copy()` default)."""
    return self._from_native_frame(self._native_frame.copy())
| 706 |
+
|
| 707 |
+
def gather_every(self: Self, n: int, offset: int = 0) -> Self:
    """Take every `n`-th row, starting at position `offset`."""
    sliced = self._native_frame.iloc[offset::n]
    return self._from_native_frame(sliced)
| 709 |
+
|
| 710 |
+
def to_arrow(self: Self) -> Any:
    """Convert to a pyarrow Table (cuDF path drops the index)."""
    if self._implementation is Implementation.CUDF:  # pragma: no cover
        return self._native_frame.to_arrow(preserve_index=False)

    import pyarrow as pa  # ignore-banned-import()

    return pa.Table.from_pandas(self._native_frame)
| 717 |
+
|
| 718 |
+
def sample(
    self: Self,
    n: int | None = None,
    *,
    fraction: float | None = None,
    with_replacement: bool = False,
    seed: int | None = None,
) -> Self:
    """Randomly sample rows; pass either `n` or `fraction`.

    `seed` is forwarded as pandas' `random_state` for reproducibility.
    """
    return self._from_native_frame(
        self._native_frame.sample(
            n=n, frac=fraction, replace=with_replacement, random_state=seed
        )
    )
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/namespace.py
ADDED
|
@@ -0,0 +1,370 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from functools import reduce
|
| 4 |
+
from typing import TYPE_CHECKING
|
| 5 |
+
from typing import Any
|
| 6 |
+
from typing import Callable
|
| 7 |
+
from typing import Iterable
|
| 8 |
+
from typing import cast
|
| 9 |
+
|
| 10 |
+
from narwhals import dtypes
|
| 11 |
+
from narwhals._expression_parsing import parse_into_exprs
|
| 12 |
+
from narwhals._pandas_like.dataframe import PandasLikeDataFrame
|
| 13 |
+
from narwhals._pandas_like.expr import PandasLikeExpr
|
| 14 |
+
from narwhals._pandas_like.selectors import PandasSelectorNamespace
|
| 15 |
+
from narwhals._pandas_like.series import PandasLikeSeries
|
| 16 |
+
from narwhals._pandas_like.utils import create_native_series
|
| 17 |
+
from narwhals._pandas_like.utils import horizontal_concat
|
| 18 |
+
from narwhals._pandas_like.utils import vertical_concat
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
from narwhals._pandas_like.typing import IntoPandasLikeExpr
|
| 22 |
+
from narwhals.utils import Implementation
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class PandasLikeNamespace:
    """Narwhals expression namespace backed by a pandas-like implementation."""

    # Narwhals dtypes re-exported so callers can reference them on the namespace.
    Int64 = dtypes.Int64
    Int32 = dtypes.Int32
    Int16 = dtypes.Int16
    Int8 = dtypes.Int8
    UInt64 = dtypes.UInt64
    UInt32 = dtypes.UInt32
    UInt16 = dtypes.UInt16
    UInt8 = dtypes.UInt8
    Float64 = dtypes.Float64
    Float32 = dtypes.Float32
    Boolean = dtypes.Boolean
    Object = dtypes.Object
    Unknown = dtypes.Unknown
    Categorical = dtypes.Categorical
    Enum = dtypes.Enum
    String = dtypes.String
    Datetime = dtypes.Datetime
    Duration = dtypes.Duration
    Date = dtypes.Date

    @property
    def selectors(self) -> PandasSelectorNamespace:
        """Column-selector sub-namespace bound to this backend."""
        return PandasSelectorNamespace(
            implementation=self._implementation, backend_version=self._backend_version
        )

    # --- not in spec ---
    def __init__(
        self, implementation: Implementation, backend_version: tuple[int, ...]
    ) -> None:
        self._implementation = implementation
        self._backend_version = backend_version

    def _create_expr_from_callable(
        self,
        func: Callable[[PandasLikeDataFrame], list[PandasLikeSeries]],
        *,
        depth: int,
        function_name: str,
        root_names: list[str] | None,
        output_names: list[str] | None,
    ) -> PandasLikeExpr:
        """Wrap `func` in a PandasLikeExpr carrying this backend's metadata."""
        return PandasLikeExpr(
            func,
            depth=depth,
            function_name=function_name,
            root_names=root_names,
            output_names=output_names,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    def _create_series_from_scalar(
        self, value: Any, series: PandasLikeSeries
    ) -> PandasLikeSeries:
        """Build a length-1 series holding `value`, aligned with `series`."""
        return PandasLikeSeries._from_iterable(
            [value],
            name=series._native_series.name,
            index=series._native_series.index[0:1],
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    def _create_expr_from_series(self, series: PandasLikeSeries) -> PandasLikeExpr:
        """Lift an existing series into a constant expression."""
        return PandasLikeExpr(
            lambda _df: [series],
            depth=0,
            function_name="series",
            root_names=None,
            output_names=None,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    def _create_compliant_series(self, value: Any) -> PandasLikeSeries:
        """Construct a backend-native series from arbitrary values."""
        return create_native_series(
            value,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    # --- selection ---
    def col(self, *column_names: str) -> PandasLikeExpr:
        """Expression selecting the given columns by name."""
        return PandasLikeExpr.from_column_names(
            *column_names,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    def all(self) -> PandasLikeExpr:
        """Expression selecting every column of the input frame."""
        return PandasLikeExpr(
            lambda df: [
                PandasLikeSeries(
                    df._native_frame.loc[:, column_name],
                    implementation=self._implementation,
                    backend_version=self._backend_version,
                )
                for column_name in df.columns
            ],
            depth=0,
            function_name="all",
            root_names=None,
            output_names=None,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    def lit(self, value: Any, dtype: dtypes.DType | None) -> PandasLikeExpr:
        """Expression producing a literal length-1 column named "lit"."""

        def _lit_pandas_series(df: PandasLikeDataFrame) -> PandasLikeSeries:
            pandas_series = PandasLikeSeries._from_iterable(
                data=[value],
                name="lit",
                index=df._native_frame.index[0:1],
                implementation=self._implementation,
                backend_version=self._backend_version,
            )
            if dtype:
                return pandas_series.cast(dtype)
            return pandas_series

        return PandasLikeExpr(
            lambda df: [_lit_pandas_series(df)],
            depth=0,
            function_name="lit",
            root_names=None,
            output_names=["lit"],
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    # --- reduction ---
    def sum(self, *column_names: str) -> PandasLikeExpr:
        """Column-wise sum of the named columns."""
        return PandasLikeExpr.from_column_names(
            *column_names,
            implementation=self._implementation,
            backend_version=self._backend_version,
        ).sum()

    def mean(self, *column_names: str) -> PandasLikeExpr:
        """Column-wise mean of the named columns."""
        return PandasLikeExpr.from_column_names(
            *column_names,
            implementation=self._implementation,
            backend_version=self._backend_version,
        ).mean()

    def max(self, *column_names: str) -> PandasLikeExpr:
        """Column-wise maximum of the named columns."""
        return PandasLikeExpr.from_column_names(
            *column_names,
            implementation=self._implementation,
            backend_version=self._backend_version,
        ).max()

    def min(self, *column_names: str) -> PandasLikeExpr:
        """Column-wise minimum of the named columns."""
        return PandasLikeExpr.from_column_names(
            *column_names,
            implementation=self._implementation,
            backend_version=self._backend_version,
        ).min()

    def len(self) -> PandasLikeExpr:
        """Expression returning the row count as a length-1 column "len"."""
        return PandasLikeExpr(
            lambda df: [
                PandasLikeSeries._from_iterable(
                    [len(df._native_frame)],
                    name="len",
                    index=[0],
                    implementation=self._implementation,
                    backend_version=self._backend_version,
                )
            ],
            depth=0,
            function_name="len",
            root_names=None,
            output_names=["len"],
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    # --- horizontal ---
    def sum_horizontal(self, *exprs: IntoPandasLikeExpr) -> PandasLikeExpr:
        """Row-wise sum across expressions; nulls are filled with 0 first."""
        return reduce(
            lambda x, y: x + y,
            [expr.fill_null(0) for expr in parse_into_exprs(*exprs, namespace=self)],
        )

    def all_horizontal(self, *exprs: IntoPandasLikeExpr) -> PandasLikeExpr:
        """Row-wise logical AND across expressions."""
        return reduce(lambda x, y: x & y, parse_into_exprs(*exprs, namespace=self))

    def any_horizontal(self, *exprs: IntoPandasLikeExpr) -> PandasLikeExpr:
        """Row-wise logical OR across expressions."""
        return reduce(lambda x, y: x | y, parse_into_exprs(*exprs, namespace=self))

    def mean_horizontal(self, *exprs: IntoPandasLikeExpr) -> PandasLikeExpr:
        """Row-wise mean: null-filled sum divided by the count of non-null entries."""
        pandas_like_exprs = parse_into_exprs(*exprs, namespace=self)
        total = reduce(lambda x, y: x + y, (e.fill_null(0.0) for e in pandas_like_exprs))
        n_non_zero = reduce(
            lambda x, y: x + y, ((1 - e.is_null()) for e in pandas_like_exprs)
        )
        return total / n_non_zero

    def concat(
        self,
        items: Iterable[PandasLikeDataFrame],
        *,
        how: str = "vertical",
    ) -> PandasLikeDataFrame:
        """Concatenate frames "horizontal"ly or "vertical"ly (other values raise)."""
        dfs: list[Any] = [item._native_frame for item in items]
        if how == "horizontal":
            return PandasLikeDataFrame(
                horizontal_concat(
                    dfs,
                    implementation=self._implementation,
                    backend_version=self._backend_version,
                ),
                implementation=self._implementation,
                backend_version=self._backend_version,
            )
        if how == "vertical":
            return PandasLikeDataFrame(
                vertical_concat(
                    dfs,
                    implementation=self._implementation,
                    backend_version=self._backend_version,
                ),
                implementation=self._implementation,
                backend_version=self._backend_version,
            )
        raise NotImplementedError

    def when(
        self,
        *predicates: IntoPandasLikeExpr,
    ) -> PandasWhen:
        """Start a when/then/otherwise chain from the AND of the given predicates."""
        plx = self.__class__(self._implementation, self._backend_version)
        if predicates:
            condition = plx.all_horizontal(*predicates)
        else:
            msg = "at least one predicate needs to be provided"
            raise TypeError(msg)

        return PandasWhen(condition, self._implementation, self._backend_version)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
class PandasWhen:
|
| 269 |
+
def __init__(
    self,
    condition: PandasLikeExpr,
    implementation: Implementation,
    backend_version: tuple[int, ...],
    then_value: Any = None,
    otherwise_value: Any = None,
) -> None:
    """Hold the predicate plus backend metadata for a when/then chain.

    `then_value` / `otherwise_value` default to None and are consumed
    by `__call__`.
    """
    self._implementation = implementation
    self._backend_version = backend_version
    self._condition = condition
    self._then_value = then_value
    self._otherwise_value = otherwise_value
|
| 282 |
+
|
| 283 |
+
def __call__(self, df: PandasLikeDataFrame) -> list[PandasLikeSeries]:
    """Evaluate the when/then/otherwise chain against `df`.

    Returns a single-element list, matching the expression-callable protocol.
    """
    from narwhals._expression_parsing import parse_into_expr
    from narwhals._pandas_like.namespace import PandasLikeNamespace
    from narwhals._pandas_like.utils import validate_column_comparand

    plx = PandasLikeNamespace(
        implementation=self._implementation, backend_version=self._backend_version
    )

    condition = parse_into_expr(self._condition, namespace=plx)._call(df)[0]  # type: ignore[arg-type]
    try:
        value_series = parse_into_expr(self._then_value, namespace=plx)._call(df)[0]  # type: ignore[arg-type]
    except TypeError:
        # `self._then_value` is a scalar and can't be converted to an expression
        value_series = condition.__class__._from_iterable(  # type: ignore[call-arg]
            [self._then_value] * len(condition),
            name="literal",
            index=condition._native_series.index,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )
    value_series = cast(PandasLikeSeries, value_series)

    value_series_native = value_series._native_series
    condition_native = validate_column_comparand(value_series_native.index, condition)

    if self._otherwise_value is None:
        # No otherwise: rows failing the condition become null (pandas `.where`).
        return [
            value_series._from_native_series(
                value_series_native.where(condition_native)
            )
        ]
    try:
        otherwise_series = parse_into_expr(
            self._otherwise_value, namespace=plx
        )._call(df)[0]  # type: ignore[arg-type]
    except TypeError:
        # `self._otherwise_value` is a scalar and can't be converted to an expression
        return [
            value_series._from_native_series(
                value_series_native.where(condition_native, self._otherwise_value)
            )
        ]
    else:
        return [value_series.zip_with(condition, otherwise_series)]
|
| 328 |
+
|
| 329 |
+
def then(self, value: PandasLikeExpr | PandasLikeSeries | Any) -> PandasThen:
|
| 330 |
+
self._then_value = value
|
| 331 |
+
|
| 332 |
+
return PandasThen(
|
| 333 |
+
self,
|
| 334 |
+
depth=0,
|
| 335 |
+
function_name="whenthen",
|
| 336 |
+
root_names=None,
|
| 337 |
+
output_names=None,
|
| 338 |
+
implementation=self._implementation,
|
| 339 |
+
backend_version=self._backend_version,
|
| 340 |
+
)
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
class PandasThen(PandasLikeExpr):
    """Expression produced by `when(...).then(...)`.

    Evaluating this expression delegates to the wrapped `PandasWhen` callable.
    """

    def __init__(
        self,
        call: PandasWhen,
        *,
        depth: int,
        function_name: str,
        root_names: list[str] | None,
        output_names: list[str] | None,
        implementation: Implementation,
        backend_version: tuple[int, ...],
    ) -> None:
        self._call = call
        self._depth = depth
        self._function_name = function_name
        self._root_names = root_names
        self._output_names = output_names
        self._implementation = implementation
        self._backend_version = backend_version

    def otherwise(self, value: PandasLikeExpr | PandasLikeSeries | Any) -> PandasLikeExpr:
        """Attach the `otherwise` branch and finalise the expression."""
        # `_call` here is a callable `PandasWhen` object rather than the plain
        # `Callable` declared on the base class, hence the type-ignore.
        self._call._otherwise_value = value  # type: ignore[attr-defined]
        self._function_name = "whenotherwise"
        return self
|
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/series.py
ADDED
|
@@ -0,0 +1,917 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING
|
| 4 |
+
from typing import Any
|
| 5 |
+
from typing import Iterable
|
| 6 |
+
from typing import Iterator
|
| 7 |
+
from typing import Literal
|
| 8 |
+
from typing import Sequence
|
| 9 |
+
from typing import overload
|
| 10 |
+
|
| 11 |
+
from narwhals._pandas_like.utils import int_dtype_mapper
|
| 12 |
+
from narwhals._pandas_like.utils import narwhals_to_native_dtype
|
| 13 |
+
from narwhals._pandas_like.utils import native_series_from_iterable
|
| 14 |
+
from narwhals._pandas_like.utils import set_axis
|
| 15 |
+
from narwhals._pandas_like.utils import to_datetime
|
| 16 |
+
from narwhals._pandas_like.utils import translate_dtype
|
| 17 |
+
from narwhals._pandas_like.utils import validate_column_comparand
|
| 18 |
+
from narwhals.dependencies import get_cudf
|
| 19 |
+
from narwhals.dependencies import get_modin
|
| 20 |
+
from narwhals.dependencies import get_pandas
|
| 21 |
+
from narwhals.utils import Implementation
|
| 22 |
+
|
| 23 |
+
if TYPE_CHECKING:
|
| 24 |
+
from typing_extensions import Self
|
| 25 |
+
|
| 26 |
+
from narwhals._pandas_like.dataframe import PandasLikeDataFrame
|
| 27 |
+
from narwhals.dtypes import DType
|
| 28 |
+
|
| 29 |
+
# Mapping from pandas nullable / pyarrow-backed dtype names to the numpy dtype
# used by `to_numpy` when the series contains no missing values.
PANDAS_TO_NUMPY_DTYPE_NO_MISSING = {
    "Int64": "int64",
    "int64[pyarrow]": "int64",
    "Int32": "int32",
    "int32[pyarrow]": "int32",
    "Int16": "int16",
    "int16[pyarrow]": "int16",
    "Int8": "int8",
    "int8[pyarrow]": "int8",
    "UInt64": "uint64",
    "uint64[pyarrow]": "uint64",
    "UInt32": "uint32",
    "uint32[pyarrow]": "uint32",
    "UInt16": "uint16",
    "uint16[pyarrow]": "uint16",
    "UInt8": "uint8",
    "uint8[pyarrow]": "uint8",
    "Float64": "float64",
    "float64[pyarrow]": "float64",
    "Float32": "float32",
    "float32[pyarrow]": "float32",
}
# Same mapping for series that DO contain missing values: numpy integer dtypes
# cannot represent NaN, so integer types are widened to floats.
PANDAS_TO_NUMPY_DTYPE_MISSING = {
    "Int64": "float64",
    "int64[pyarrow]": "float64",
    "Int32": "float64",
    "int32[pyarrow]": "float64",
    "Int16": "float64",
    "int16[pyarrow]": "float64",
    "Int8": "float64",
    "int8[pyarrow]": "float64",
    "UInt64": "float64",
    "uint64[pyarrow]": "float64",
    "UInt32": "float64",
    "uint32[pyarrow]": "float64",
    "UInt16": "float64",
    "uint16[pyarrow]": "float64",
    "UInt8": "float64",
    "uint8[pyarrow]": "float64",
    "Float64": "float64",
    "float64[pyarrow]": "float64",
    "Float32": "float32",
    "float32[pyarrow]": "float32",
}
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class PandasLikeSeries:
|
| 76 |
+
def __init__(
|
| 77 |
+
self,
|
| 78 |
+
native_series: Any,
|
| 79 |
+
*,
|
| 80 |
+
implementation: Implementation,
|
| 81 |
+
backend_version: tuple[int, ...],
|
| 82 |
+
) -> None:
|
| 83 |
+
self._name = native_series.name
|
| 84 |
+
self._native_series = native_series
|
| 85 |
+
self._implementation = implementation
|
| 86 |
+
self._backend_version = backend_version
|
| 87 |
+
|
| 88 |
+
# In pandas, copy-on-write becomes the default in version 3.
|
| 89 |
+
# So, before that, we need to explicitly avoid unnecessary
|
| 90 |
+
# copies by using `copy=False` sometimes.
|
| 91 |
+
if self._implementation is Implementation.PANDAS and self._backend_version < (
|
| 92 |
+
3,
|
| 93 |
+
0,
|
| 94 |
+
0,
|
| 95 |
+
):
|
| 96 |
+
self._use_copy_false = True
|
| 97 |
+
else:
|
| 98 |
+
self._use_copy_false = False
|
| 99 |
+
|
| 100 |
+
def __native_namespace__(self) -> Any:
|
| 101 |
+
if self._implementation is Implementation.PANDAS:
|
| 102 |
+
return get_pandas()
|
| 103 |
+
if self._implementation is Implementation.MODIN: # pragma: no cover
|
| 104 |
+
return get_modin()
|
| 105 |
+
if self._implementation is Implementation.CUDF: # pragma: no cover
|
| 106 |
+
return get_cudf()
|
| 107 |
+
msg = f"Expected pandas/modin/cudf, got: {type(self._implementation)}" # pragma: no cover
|
| 108 |
+
raise AssertionError(msg)
|
| 109 |
+
|
| 110 |
+
    def __narwhals_series__(self) -> Self:
        """Narwhals protocol hook: this object is already a compliant series."""
        return self

    @overload
    def __getitem__(self, idx: int) -> Any: ...

    @overload
    def __getitem__(self, idx: slice | Sequence[int]) -> Self: ...

    def __getitem__(self, idx: int | slice | Sequence[int]) -> Any | Self:
        # Integer position -> scalar; slice/sequence -> new wrapped series.
        if isinstance(idx, int):
            return self._native_series.iloc[idx]
        return self._from_native_series(self._native_series.iloc[idx])

    def _rename(self, series: Any, name: str) -> Any:
        # `copy=False` avoids a defensive copy on pre-copy-on-write pandas.
        if self._use_copy_false:
            return series.rename(name, copy=False)
        return series.rename(name)  # pragma: no cover

    def _from_native_series(self, series: Any) -> Self:
        """Wrap a native series in a new instance with the same backend settings."""
        return self.__class__(
            series,
            implementation=self._implementation,
            backend_version=self._backend_version,
        )
|
| 135 |
+
|
| 136 |
+
    @classmethod
    def _from_iterable(
        cls: type[Self],
        data: Iterable[Any],
        name: str,
        index: Any,
        *,
        implementation: Implementation,
        backend_version: tuple[int, ...],
    ) -> Self:
        """Build a wrapped series from plain Python values.

        The new native series gets the given `name` and `index`, and is created
        by the backend selected via `implementation`.
        """
        return cls(
            native_series_from_iterable(
                data,
                name=name,
                index=index,
                implementation=implementation,
            ),
            implementation=implementation,
            backend_version=backend_version,
        )
|
| 156 |
+
|
| 157 |
+
    def __len__(self) -> int:
        """Number of rows in the series."""
        return self.shape[0]

    @property
    def name(self) -> str:
        """Series name, captured at construction time."""
        return self._name  # type: ignore[no-any-return]

    @property
    def shape(self) -> tuple[int]:
        """Shape of the underlying native series (a 1-tuple)."""
        return self._native_series.shape  # type: ignore[no-any-return]

    @property
    def dtype(self: Self) -> DType:
        """Narwhals dtype, translated from the native dtype."""
        return translate_dtype(self._native_series)
|
| 171 |
+
|
| 172 |
+
    def scatter(self, indices: int | Sequence[int], values: Any) -> Self:
        """Set `values` at the given positional `indices`.

        NOTE(review): this assigns into the existing native series in place
        before re-wrapping it, so the original underlying data is mutated.
        """
        if isinstance(values, self.__class__):
            # .copy() is necessary in some pre-2.2 versions of pandas to avoid
            # `values` also getting modified (!)
            values = validate_column_comparand(self._native_series.index, values).copy()
            # Re-label the replacement values with the index labels at `indices`
            # so the positional assignment below aligns correctly.
            values = set_axis(
                values,
                self._native_series.index[indices],
                implementation=self._implementation,
                backend_version=self._backend_version,
            )
        s = self._native_series
        s.iloc[indices] = values
        s.name = self.name
        return self._from_native_series(s)
|
| 187 |
+
|
| 188 |
+
def cast(
|
| 189 |
+
self,
|
| 190 |
+
dtype: Any,
|
| 191 |
+
) -> Self:
|
| 192 |
+
ser = self._native_series
|
| 193 |
+
dtype = narwhals_to_native_dtype(dtype, ser.dtype, self._implementation)
|
| 194 |
+
return self._from_native_series(ser.astype(dtype))
|
| 195 |
+
|
| 196 |
+
def item(self: Self, index: int | None = None) -> Any:
|
| 197 |
+
# cuDF doesn't have Series.item().
|
| 198 |
+
if index is None:
|
| 199 |
+
if len(self) != 1:
|
| 200 |
+
msg = (
|
| 201 |
+
"can only call '.item()' if the Series is of length 1,"
|
| 202 |
+
f" or an explicit index is provided (Series is of length {len(self)})"
|
| 203 |
+
)
|
| 204 |
+
raise ValueError(msg)
|
| 205 |
+
return self._native_series.iloc[0]
|
| 206 |
+
return self._native_series.iloc[index]
|
| 207 |
+
|
| 208 |
+
    def to_frame(self) -> PandasLikeDataFrame:
        """Wrap this single column as a one-column dataframe."""
        from narwhals._pandas_like.dataframe import PandasLikeDataFrame

        return PandasLikeDataFrame(
            self._native_series.to_frame(),
            implementation=self._implementation,
            backend_version=self._backend_version,
        )

    def to_list(self) -> Any:
        """Convert to a plain Python list."""
        return self._native_series.to_list()
|
| 219 |
+
|
| 220 |
+
def is_between(
|
| 221 |
+
self, lower_bound: Any, upper_bound: Any, closed: str = "both"
|
| 222 |
+
) -> PandasLikeSeries:
|
| 223 |
+
ser = self._native_series
|
| 224 |
+
if closed == "left":
|
| 225 |
+
res = ser.ge(lower_bound) & ser.lt(upper_bound)
|
| 226 |
+
elif closed == "right":
|
| 227 |
+
res = ser.gt(lower_bound) & ser.le(upper_bound)
|
| 228 |
+
elif closed == "none":
|
| 229 |
+
res = ser.gt(lower_bound) & ser.lt(upper_bound)
|
| 230 |
+
elif closed == "both":
|
| 231 |
+
res = ser.ge(lower_bound) & ser.le(upper_bound)
|
| 232 |
+
else: # pragma: no cover
|
| 233 |
+
raise AssertionError
|
| 234 |
+
return self._from_native_series(res)
|
| 235 |
+
|
| 236 |
+
    def is_in(self, other: Any) -> PandasLikeSeries:
        """Elementwise membership test against the values in `other`."""
        ser = self._native_series
        res = ser.isin(other)
        return self._from_native_series(res)

    def arg_true(self) -> PandasLikeSeries:
        """Positional indices at which the (boolean) series is True."""
        ser = self._native_series
        # Build a 0..n-1 series on the same index, then mask it with `ser`.
        result = ser.__class__(range(len(ser)), name=ser.name, index=ser.index).loc[ser]
        return self._from_native_series(result)

    # Binary comparisons

    def filter(self, other: Any) -> PandasLikeSeries:
        """Keep only the rows where `other` is True."""
        ser = self._native_series
        # A plain list of bools can be passed to `.loc` directly; anything else
        # (e.g. another wrapped series) must first be aligned/extracted.
        if not (isinstance(other, list) and all(isinstance(x, bool) for x in other)):
            other = validate_column_comparand(self._native_series.index, other)
        return self._from_native_series(self._rename(ser.loc[other], ser.name))
|
| 253 |
+
|
| 254 |
+
def __eq__(self, other: object) -> PandasLikeSeries: # type: ignore[override]
|
| 255 |
+
ser = self._native_series
|
| 256 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 257 |
+
return self._from_native_series(self._rename(ser.__eq__(other), ser.name))
|
| 258 |
+
|
| 259 |
+
def __ne__(self, other: object) -> PandasLikeSeries: # type: ignore[override]
|
| 260 |
+
ser = self._native_series
|
| 261 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 262 |
+
return self._from_native_series(self._rename(ser.__ne__(other), ser.name))
|
| 263 |
+
|
| 264 |
+
def __ge__(self, other: Any) -> PandasLikeSeries:
|
| 265 |
+
ser = self._native_series
|
| 266 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 267 |
+
return self._from_native_series(self._rename(ser.__ge__(other), ser.name))
|
| 268 |
+
|
| 269 |
+
def __gt__(self, other: Any) -> PandasLikeSeries:
|
| 270 |
+
ser = self._native_series
|
| 271 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 272 |
+
return self._from_native_series(self._rename(ser.__gt__(other), ser.name))
|
| 273 |
+
|
| 274 |
+
def __le__(self, other: Any) -> PandasLikeSeries:
|
| 275 |
+
ser = self._native_series
|
| 276 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 277 |
+
return self._from_native_series(self._rename(ser.__le__(other), ser.name))
|
| 278 |
+
|
| 279 |
+
def __lt__(self, other: Any) -> PandasLikeSeries:
|
| 280 |
+
ser = self._native_series
|
| 281 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 282 |
+
return self._from_native_series(self._rename(ser.__lt__(other), ser.name))
|
| 283 |
+
|
| 284 |
+
def __and__(self, other: Any) -> PandasLikeSeries:
|
| 285 |
+
ser = self._native_series
|
| 286 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 287 |
+
return self._from_native_series(self._rename(ser.__and__(other), ser.name))
|
| 288 |
+
|
| 289 |
+
def __rand__(self, other: Any) -> PandasLikeSeries:
|
| 290 |
+
ser = self._native_series
|
| 291 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 292 |
+
return self._from_native_series(self._rename(ser.__rand__(other), ser.name))
|
| 293 |
+
|
| 294 |
+
def __or__(self, other: Any) -> PandasLikeSeries:
|
| 295 |
+
ser = self._native_series
|
| 296 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 297 |
+
return self._from_native_series(self._rename(ser.__or__(other), ser.name))
|
| 298 |
+
|
| 299 |
+
def __ror__(self, other: Any) -> PandasLikeSeries:
|
| 300 |
+
ser = self._native_series
|
| 301 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 302 |
+
return self._from_native_series(self._rename(ser.__ror__(other), ser.name))
|
| 303 |
+
|
| 304 |
+
def __add__(self, other: Any) -> PandasLikeSeries:
|
| 305 |
+
ser = self._native_series
|
| 306 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 307 |
+
return self._from_native_series(self._rename(ser.__add__(other), ser.name))
|
| 308 |
+
|
| 309 |
+
def __radd__(self, other: Any) -> PandasLikeSeries:
|
| 310 |
+
ser = self._native_series
|
| 311 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 312 |
+
return self._from_native_series(self._rename(ser.__radd__(other), ser.name))
|
| 313 |
+
|
| 314 |
+
def __sub__(self, other: Any) -> PandasLikeSeries:
|
| 315 |
+
ser = self._native_series
|
| 316 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 317 |
+
return self._from_native_series(self._rename(ser.__sub__(other), ser.name))
|
| 318 |
+
|
| 319 |
+
def __rsub__(self, other: Any) -> PandasLikeSeries:
|
| 320 |
+
ser = self._native_series
|
| 321 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 322 |
+
return self._from_native_series(self._rename(ser.__rsub__(other), ser.name))
|
| 323 |
+
|
| 324 |
+
def __mul__(self, other: Any) -> PandasLikeSeries:
|
| 325 |
+
ser = self._native_series
|
| 326 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 327 |
+
return self._from_native_series(self._rename(ser.__mul__(other), ser.name))
|
| 328 |
+
|
| 329 |
+
def __rmul__(self, other: Any) -> PandasLikeSeries:
|
| 330 |
+
ser = self._native_series
|
| 331 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 332 |
+
return self._from_native_series(self._rename(ser.__rmul__(other), ser.name))
|
| 333 |
+
|
| 334 |
+
def __truediv__(self, other: Any) -> PandasLikeSeries:
|
| 335 |
+
ser = self._native_series
|
| 336 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 337 |
+
return self._from_native_series(self._rename(ser.__truediv__(other), ser.name))
|
| 338 |
+
|
| 339 |
+
def __rtruediv__(self, other: Any) -> PandasLikeSeries:
|
| 340 |
+
ser = self._native_series
|
| 341 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 342 |
+
return self._from_native_series(self._rename(ser.__rtruediv__(other), ser.name))
|
| 343 |
+
|
| 344 |
+
def __floordiv__(self, other: Any) -> PandasLikeSeries:
|
| 345 |
+
ser = self._native_series
|
| 346 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 347 |
+
return self._from_native_series(self._rename(ser.__floordiv__(other), ser.name))
|
| 348 |
+
|
| 349 |
+
def __rfloordiv__(self, other: Any) -> PandasLikeSeries:
|
| 350 |
+
ser = self._native_series
|
| 351 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 352 |
+
return self._from_native_series(self._rename(ser.__rfloordiv__(other), ser.name))
|
| 353 |
+
|
| 354 |
+
def __pow__(self, other: Any) -> PandasLikeSeries:
|
| 355 |
+
ser = self._native_series
|
| 356 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 357 |
+
return self._from_native_series(self._rename(ser.__pow__(other), ser.name))
|
| 358 |
+
|
| 359 |
+
def __rpow__(self, other: Any) -> PandasLikeSeries:
|
| 360 |
+
ser = self._native_series
|
| 361 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 362 |
+
return self._from_native_series(self._rename(ser.__rpow__(other), ser.name))
|
| 363 |
+
|
| 364 |
+
def __mod__(self, other: Any) -> PandasLikeSeries:
|
| 365 |
+
ser = self._native_series
|
| 366 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 367 |
+
return self._from_native_series(self._rename(ser.__mod__(other), ser.name))
|
| 368 |
+
|
| 369 |
+
def __rmod__(self, other: Any) -> PandasLikeSeries:
|
| 370 |
+
ser = self._native_series
|
| 371 |
+
other = validate_column_comparand(self._native_series.index, other)
|
| 372 |
+
return self._from_native_series(self._rename(ser.__rmod__(other), ser.name))
|
| 373 |
+
|
| 374 |
+
# Unary
|
| 375 |
+
|
| 376 |
+
    def __invert__(self: PandasLikeSeries) -> PandasLikeSeries:
        """Elementwise boolean/bitwise NOT."""
        ser = self._native_series
        return self._from_native_series(~ser)
|
| 379 |
+
|
| 380 |
+
# Reductions
|
| 381 |
+
|
| 382 |
+
def any(self) -> Any:
|
| 383 |
+
ser = self._native_series
|
| 384 |
+
return ser.any()
|
| 385 |
+
|
| 386 |
+
def all(self) -> Any:
|
| 387 |
+
ser = self._native_series
|
| 388 |
+
return ser.all()
|
| 389 |
+
|
| 390 |
+
def min(self) -> Any:
|
| 391 |
+
ser = self._native_series
|
| 392 |
+
return ser.min()
|
| 393 |
+
|
| 394 |
+
def max(self) -> Any:
|
| 395 |
+
ser = self._native_series
|
| 396 |
+
return ser.max()
|
| 397 |
+
|
| 398 |
+
def sum(self) -> Any:
|
| 399 |
+
ser = self._native_series
|
| 400 |
+
return ser.sum()
|
| 401 |
+
|
| 402 |
+
def count(self) -> Any:
|
| 403 |
+
ser = self._native_series
|
| 404 |
+
return ser.count()
|
| 405 |
+
|
| 406 |
+
def mean(self) -> Any:
|
| 407 |
+
ser = self._native_series
|
| 408 |
+
return ser.mean()
|
| 409 |
+
|
| 410 |
+
def std(
|
| 411 |
+
self,
|
| 412 |
+
*,
|
| 413 |
+
ddof: int = 1,
|
| 414 |
+
) -> Any:
|
| 415 |
+
ser = self._native_series
|
| 416 |
+
return ser.std(ddof=ddof)
|
| 417 |
+
|
| 418 |
+
def len(self) -> Any:
|
| 419 |
+
return len(self._native_series)
|
| 420 |
+
|
| 421 |
+
# Transformations
|
| 422 |
+
|
| 423 |
+
def is_null(self) -> PandasLikeSeries:
|
| 424 |
+
ser = self._native_series
|
| 425 |
+
return self._from_native_series(ser.isna())
|
| 426 |
+
|
| 427 |
+
def fill_null(self, value: Any) -> PandasLikeSeries:
|
| 428 |
+
ser = self._native_series
|
| 429 |
+
return self._from_native_series(ser.fillna(value))
|
| 430 |
+
|
| 431 |
+
def drop_nulls(self) -> PandasLikeSeries:
|
| 432 |
+
ser = self._native_series
|
| 433 |
+
return self._from_native_series(ser.dropna())
|
| 434 |
+
|
| 435 |
+
def n_unique(self) -> int:
|
| 436 |
+
ser = self._native_series
|
| 437 |
+
return ser.nunique(dropna=False) # type: ignore[no-any-return]
|
| 438 |
+
|
| 439 |
+
    def sample(
        self: Self,
        n: int | None = None,
        *,
        fraction: float | None = None,
        with_replacement: bool = False,
        seed: int | None = None,
    ) -> Self:
        """Random sample of `n` rows (or a `fraction`), optionally with replacement."""
        ser = self._native_series
        return self._from_native_series(
            ser.sample(n=n, frac=fraction, replace=with_replacement, random_state=seed)
        )

    def abs(self) -> PandasLikeSeries:
        """Elementwise absolute value."""
        return self._from_native_series(self._native_series.abs())

    def cum_sum(self) -> PandasLikeSeries:
        """Cumulative sum."""
        return self._from_native_series(self._native_series.cumsum())
|
| 457 |
+
|
| 458 |
+
    def unique(self) -> PandasLikeSeries:
        """Distinct values, as a freshly-indexed series with the same name."""
        return self._from_native_series(
            self._native_series.__class__(
                self._native_series.unique(), name=self._native_series.name
            )
        )

    def diff(self) -> PandasLikeSeries:
        """First discrete difference (the first element becomes null)."""
        return self._from_native_series(self._native_series.diff())

    def shift(self, n: int) -> PandasLikeSeries:
        """Shift values by `n` positions (vacated slots become null)."""
        return self._from_native_series(self._native_series.shift(n))
|
| 470 |
+
|
| 471 |
+
    def sort(
        self, *, descending: bool = False, nulls_last: bool = False
    ) -> PandasLikeSeries:
        """Sorted copy of the series; nulls go first unless `nulls_last`."""
        ser = self._native_series
        na_position = "last" if nulls_last else "first"
        return self._from_native_series(
            ser.sort_values(ascending=not descending, na_position=na_position).rename(
                self.name
            )
        )

    def alias(self, name: str) -> Self:
        """Return a copy of the series under a new name."""
        ser = self._native_series
        return self._from_native_series(self._rename(ser, name))
|
| 485 |
+
|
| 486 |
+
    def __array__(self, dtype: Any = None, copy: bool | None = None) -> Any:
        # pandas used to always return object dtype for nullable dtypes.
        # So, we intercept __array__ and pass to `to_numpy` ourselves to make
        # sure an appropriate numpy dtype is returned.
        return self.to_numpy(dtype=dtype, copy=copy)

    def to_numpy(self, dtype: Any = None, copy: bool | None = None) -> Any:
        """Convert to a numpy array, mapping nullable dtypes to sensible numpy ones."""
        # the default is meant to be None, but pandas doesn't allow it?
        # https://numpy.org/doc/stable/reference/generated/numpy.ndarray.__array__.html
        copy = copy or self._implementation is Implementation.CUDF

        has_missing = self._native_series.isna().any()
        if (
            has_missing
            and str(self._native_series.dtype) in PANDAS_TO_NUMPY_DTYPE_MISSING
        ):
            # Very old pandas (<1.0) doesn't accept `na_value` in `to_numpy`.
            if self._implementation is Implementation.PANDAS and self._backend_version < (
                1,
            ):  # pragma: no cover
                kwargs = {}
            else:
                kwargs = {"na_value": float("nan")}
            return self._native_series.to_numpy(
                dtype=dtype
                or PANDAS_TO_NUMPY_DTYPE_MISSING[str(self._native_series.dtype)],
                copy=copy,
                **kwargs,
            )
        if (
            not has_missing
            and str(self._native_series.dtype) in PANDAS_TO_NUMPY_DTYPE_NO_MISSING
        ):
            return self._native_series.to_numpy(
                dtype=dtype
                or PANDAS_TO_NUMPY_DTYPE_NO_MISSING[str(self._native_series.dtype)],
                copy=copy,
            )
        return self._native_series.to_numpy(dtype=dtype, copy=copy)
|
| 524 |
+
|
| 525 |
+
def to_pandas(self) -> Any:
|
| 526 |
+
if self._implementation is Implementation.PANDAS:
|
| 527 |
+
return self._native_series
|
| 528 |
+
elif self._implementation is Implementation.CUDF: # pragma: no cover
|
| 529 |
+
return self._native_series.to_pandas()
|
| 530 |
+
elif self._implementation is Implementation.MODIN: # pragma: no cover
|
| 531 |
+
return self._native_series._to_pandas()
|
| 532 |
+
msg = f"Unknown implementation: {self._implementation}" # pragma: no cover
|
| 533 |
+
raise AssertionError(msg)
|
| 534 |
+
|
| 535 |
+
# --- descriptive ---
|
| 536 |
+
def is_duplicated(self: Self) -> Self:
|
| 537 |
+
res = self._native_series.duplicated(keep=False)
|
| 538 |
+
res = self._rename(res, self.name)
|
| 539 |
+
return self._from_native_series(res)
|
| 540 |
+
|
| 541 |
+
def is_empty(self: Self) -> bool:
|
| 542 |
+
return self._native_series.empty # type: ignore[no-any-return]
|
| 543 |
+
|
| 544 |
+
def is_unique(self: Self) -> Self:
|
| 545 |
+
res = ~self._native_series.duplicated(keep=False)
|
| 546 |
+
res = self._rename(res, self.name)
|
| 547 |
+
return self._from_native_series(res)
|
| 548 |
+
|
| 549 |
+
def null_count(self: Self) -> int:
|
| 550 |
+
return self._native_series.isna().sum() # type: ignore[no-any-return]
|
| 551 |
+
|
| 552 |
+
def is_first_distinct(self: Self) -> Self:
|
| 553 |
+
res = ~self._native_series.duplicated(keep="first")
|
| 554 |
+
res = self._rename(res, self.name)
|
| 555 |
+
return self._from_native_series(res)
|
| 556 |
+
|
| 557 |
+
def is_last_distinct(self: Self) -> Self:
|
| 558 |
+
res = ~self._native_series.duplicated(keep="last")
|
| 559 |
+
res = self._rename(res, self.name)
|
| 560 |
+
return self._from_native_series(res)
|
| 561 |
+
|
| 562 |
+
def is_sorted(self: Self, *, descending: bool = False) -> bool:
|
| 563 |
+
if not isinstance(descending, bool):
|
| 564 |
+
msg = f"argument 'descending' should be boolean, found {type(descending)}"
|
| 565 |
+
raise TypeError(msg)
|
| 566 |
+
|
| 567 |
+
if descending:
|
| 568 |
+
return self._native_series.is_monotonic_decreasing # type: ignore[no-any-return]
|
| 569 |
+
else:
|
| 570 |
+
return self._native_series.is_monotonic_increasing # type: ignore[no-any-return]
|
| 571 |
+
|
| 572 |
+
def value_counts(
|
| 573 |
+
self: Self,
|
| 574 |
+
*,
|
| 575 |
+
sort: bool = False,
|
| 576 |
+
parallel: bool = False,
|
| 577 |
+
name: str | None = None,
|
| 578 |
+
normalize: bool = False,
|
| 579 |
+
) -> PandasLikeDataFrame:
|
| 580 |
+
"""Parallel is unused, exists for compatibility"""
|
| 581 |
+
from narwhals._pandas_like.dataframe import PandasLikeDataFrame
|
| 582 |
+
|
| 583 |
+
index_name_ = "index" if self._name is None else self._name
|
| 584 |
+
value_name_ = name or ("proportion" if normalize else "count")
|
| 585 |
+
|
| 586 |
+
val_count = self._native_series.value_counts(
|
| 587 |
+
dropna=False,
|
| 588 |
+
sort=False,
|
| 589 |
+
normalize=normalize,
|
| 590 |
+
).reset_index()
|
| 591 |
+
|
| 592 |
+
val_count.columns = [index_name_, value_name_]
|
| 593 |
+
|
| 594 |
+
if sort:
|
| 595 |
+
val_count = val_count.sort_values(value_name_, ascending=False)
|
| 596 |
+
|
| 597 |
+
return PandasLikeDataFrame(
|
| 598 |
+
val_count,
|
| 599 |
+
implementation=self._implementation,
|
| 600 |
+
backend_version=self._backend_version,
|
| 601 |
+
)
|
| 602 |
+
|
| 603 |
+
def quantile(
|
| 604 |
+
self: Self,
|
| 605 |
+
quantile: float,
|
| 606 |
+
interpolation: Literal["nearest", "higher", "lower", "midpoint", "linear"],
|
| 607 |
+
) -> Any:
|
| 608 |
+
return self._native_series.quantile(q=quantile, interpolation=interpolation)
|
| 609 |
+
|
| 610 |
+
def zip_with(self: Self, mask: Any, other: Any) -> PandasLikeSeries:
|
| 611 |
+
ser = self._native_series
|
| 612 |
+
mask = validate_column_comparand(ser.index, mask)
|
| 613 |
+
other = validate_column_comparand(ser.index, other)
|
| 614 |
+
res = ser.where(mask, other)
|
| 615 |
+
return self._from_native_series(res)
|
| 616 |
+
|
| 617 |
+
def head(self: Self, n: int) -> Self:
|
| 618 |
+
return self._from_native_series(self._native_series.head(n))
|
| 619 |
+
|
| 620 |
+
def tail(self: Self, n: int) -> Self:
|
| 621 |
+
return self._from_native_series(self._native_series.tail(n))
|
| 622 |
+
|
| 623 |
+
def round(self: Self, decimals: int) -> Self:
|
| 624 |
+
return self._from_native_series(self._native_series.round(decimals=decimals))
|
| 625 |
+
|
| 626 |
+
def to_dummies(
|
| 627 |
+
self: Self, *, separator: str = "_", drop_first: bool = False
|
| 628 |
+
) -> PandasLikeDataFrame:
|
| 629 |
+
from narwhals._pandas_like.dataframe import PandasLikeDataFrame
|
| 630 |
+
|
| 631 |
+
plx = self.__native_namespace__()
|
| 632 |
+
series = self._native_series
|
| 633 |
+
name = str(self._name) if self._name else ""
|
| 634 |
+
return PandasLikeDataFrame(
|
| 635 |
+
plx.get_dummies(
|
| 636 |
+
series,
|
| 637 |
+
prefix=name,
|
| 638 |
+
prefix_sep=separator,
|
| 639 |
+
drop_first=drop_first,
|
| 640 |
+
).astype(int),
|
| 641 |
+
implementation=self._implementation,
|
| 642 |
+
backend_version=self._backend_version,
|
| 643 |
+
)
|
| 644 |
+
|
| 645 |
+
def gather_every(self: Self, n: int, offset: int = 0) -> Self:
|
| 646 |
+
return self._from_native_series(self._native_series.iloc[offset::n])
|
| 647 |
+
|
| 648 |
+
def clip(
|
| 649 |
+
self: Self, lower_bound: Any | None = None, upper_bound: Any | None = None
|
| 650 |
+
) -> Self:
|
| 651 |
+
return self._from_native_series(
|
| 652 |
+
self._native_series.clip(lower_bound, upper_bound)
|
| 653 |
+
)
|
| 654 |
+
|
| 655 |
+
def to_arrow(self: Self) -> Any:
|
| 656 |
+
if self._implementation is Implementation.CUDF: # pragma: no cover
|
| 657 |
+
return self._native_series.to_arrow()
|
| 658 |
+
|
| 659 |
+
import pyarrow as pa # ignore-banned-import()
|
| 660 |
+
|
| 661 |
+
return pa.Array.from_pandas(self._native_series)
|
| 662 |
+
|
| 663 |
+
def mode(self: Self) -> Self:
|
| 664 |
+
native_series = self._native_series
|
| 665 |
+
result = native_series.mode()
|
| 666 |
+
result.name = native_series.name
|
| 667 |
+
return self._from_native_series(result)
|
| 668 |
+
|
| 669 |
+
def __iter__(self: Self) -> Iterator[Any]:
|
| 670 |
+
yield from self._native_series.__iter__()
|
| 671 |
+
|
| 672 |
+
@property
|
| 673 |
+
def str(self) -> PandasLikeSeriesStringNamespace:
|
| 674 |
+
return PandasLikeSeriesStringNamespace(self)
|
| 675 |
+
|
| 676 |
+
@property
|
| 677 |
+
def dt(self) -> PandasLikeSeriesDateTimeNamespace:
|
| 678 |
+
return PandasLikeSeriesDateTimeNamespace(self)
|
| 679 |
+
|
| 680 |
+
@property
|
| 681 |
+
def cat(self) -> PandasLikeSeriesCatNamespace:
|
| 682 |
+
return PandasLikeSeriesCatNamespace(self)
|
| 683 |
+
|
| 684 |
+
|
| 685 |
+
class PandasLikeSeriesCatNamespace:
|
| 686 |
+
def __init__(self, series: PandasLikeSeries) -> None:
|
| 687 |
+
self._pandas_series = series
|
| 688 |
+
|
| 689 |
+
def get_categories(self) -> PandasLikeSeries:
|
| 690 |
+
s = self._pandas_series._native_series
|
| 691 |
+
return self._pandas_series._from_native_series(
|
| 692 |
+
s.__class__(s.cat.categories, name=s.name)
|
| 693 |
+
)
|
| 694 |
+
|
| 695 |
+
|
| 696 |
+
class PandasLikeSeriesStringNamespace:
|
| 697 |
+
def __init__(self, series: PandasLikeSeries) -> None:
|
| 698 |
+
self._pandas_series = series
|
| 699 |
+
|
| 700 |
+
def len_chars(self) -> PandasLikeSeries:
|
| 701 |
+
return self._pandas_series._from_native_series(
|
| 702 |
+
self._pandas_series._native_series.str.len()
|
| 703 |
+
)
|
| 704 |
+
|
| 705 |
+
def replace(
|
| 706 |
+
self, pattern: str, value: str, *, literal: bool = False, n: int = 1
|
| 707 |
+
) -> PandasLikeSeries:
|
| 708 |
+
return self._pandas_series._from_native_series(
|
| 709 |
+
self._pandas_series._native_series.str.replace(
|
| 710 |
+
pat=pattern, repl=value, n=n, regex=not literal
|
| 711 |
+
),
|
| 712 |
+
)
|
| 713 |
+
|
| 714 |
+
def replace_all(
|
| 715 |
+
self, pattern: str, value: str, *, literal: bool = False
|
| 716 |
+
) -> PandasLikeSeries:
|
| 717 |
+
return self.replace(pattern, value, literal=literal, n=-1)
|
| 718 |
+
|
| 719 |
+
def strip_chars(self, characters: str | None) -> PandasLikeSeries:
|
| 720 |
+
return self._pandas_series._from_native_series(
|
| 721 |
+
self._pandas_series._native_series.str.strip(characters),
|
| 722 |
+
)
|
| 723 |
+
|
| 724 |
+
def starts_with(self, prefix: str) -> PandasLikeSeries:
|
| 725 |
+
return self._pandas_series._from_native_series(
|
| 726 |
+
self._pandas_series._native_series.str.startswith(prefix),
|
| 727 |
+
)
|
| 728 |
+
|
| 729 |
+
def ends_with(self, suffix: str) -> PandasLikeSeries:
|
| 730 |
+
return self._pandas_series._from_native_series(
|
| 731 |
+
self._pandas_series._native_series.str.endswith(suffix),
|
| 732 |
+
)
|
| 733 |
+
|
| 734 |
+
def contains(self, pattern: str, *, literal: bool = False) -> PandasLikeSeries:
|
| 735 |
+
return self._pandas_series._from_native_series(
|
| 736 |
+
self._pandas_series._native_series.str.contains(
|
| 737 |
+
pat=pattern, regex=not literal
|
| 738 |
+
)
|
| 739 |
+
)
|
| 740 |
+
|
| 741 |
+
def slice(self, offset: int, length: int | None = None) -> PandasLikeSeries:
|
| 742 |
+
stop = offset + length if length else None
|
| 743 |
+
return self._pandas_series._from_native_series(
|
| 744 |
+
self._pandas_series._native_series.str.slice(start=offset, stop=stop),
|
| 745 |
+
)
|
| 746 |
+
|
| 747 |
+
def to_datetime(self, format: str | None = None) -> PandasLikeSeries: # noqa: A002
|
| 748 |
+
return self._pandas_series._from_native_series(
|
| 749 |
+
to_datetime(self._pandas_series._implementation)(
|
| 750 |
+
self._pandas_series._native_series, format=format
|
| 751 |
+
)
|
| 752 |
+
)
|
| 753 |
+
|
| 754 |
+
def to_uppercase(self) -> PandasLikeSeries:
|
| 755 |
+
return self._pandas_series._from_native_series(
|
| 756 |
+
self._pandas_series._native_series.str.upper(),
|
| 757 |
+
)
|
| 758 |
+
|
| 759 |
+
def to_lowercase(self) -> PandasLikeSeries:
|
| 760 |
+
return self._pandas_series._from_native_series(
|
| 761 |
+
self._pandas_series._native_series.str.lower(),
|
| 762 |
+
)
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
class PandasLikeSeriesDateTimeNamespace:
|
| 766 |
+
def __init__(self, series: PandasLikeSeries) -> None:
|
| 767 |
+
self._pandas_series = series
|
| 768 |
+
|
| 769 |
+
def date(self) -> PandasLikeSeries:
|
| 770 |
+
result = self._pandas_series._from_native_series(
|
| 771 |
+
self._pandas_series._native_series.dt.date,
|
| 772 |
+
)
|
| 773 |
+
if str(result.dtype).lower() == "object":
|
| 774 |
+
msg = (
|
| 775 |
+
"Accessing `date` on the default pandas backend "
|
| 776 |
+
"will return a Series of type `object`."
|
| 777 |
+
"\nThis differs from polars API and will prevent `.dt` chaining. "
|
| 778 |
+
"Please switch to the `pyarrow` backend:"
|
| 779 |
+
'\ndf.convert_dtypes(dtype_backend="pyarrow")'
|
| 780 |
+
)
|
| 781 |
+
raise NotImplementedError(msg)
|
| 782 |
+
return result
|
| 783 |
+
|
| 784 |
+
def year(self) -> PandasLikeSeries:
|
| 785 |
+
return self._pandas_series._from_native_series(
|
| 786 |
+
self._pandas_series._native_series.dt.year,
|
| 787 |
+
)
|
| 788 |
+
|
| 789 |
+
def month(self) -> PandasLikeSeries:
|
| 790 |
+
return self._pandas_series._from_native_series(
|
| 791 |
+
self._pandas_series._native_series.dt.month,
|
| 792 |
+
)
|
| 793 |
+
|
| 794 |
+
def day(self) -> PandasLikeSeries:
|
| 795 |
+
return self._pandas_series._from_native_series(
|
| 796 |
+
self._pandas_series._native_series.dt.day,
|
| 797 |
+
)
|
| 798 |
+
|
| 799 |
+
def hour(self) -> PandasLikeSeries:
|
| 800 |
+
return self._pandas_series._from_native_series(
|
| 801 |
+
self._pandas_series._native_series.dt.hour,
|
| 802 |
+
)
|
| 803 |
+
|
| 804 |
+
def minute(self) -> PandasLikeSeries:
|
| 805 |
+
return self._pandas_series._from_native_series(
|
| 806 |
+
self._pandas_series._native_series.dt.minute,
|
| 807 |
+
)
|
| 808 |
+
|
| 809 |
+
def second(self) -> PandasLikeSeries:
|
| 810 |
+
return self._pandas_series._from_native_series(
|
| 811 |
+
self._pandas_series._native_series.dt.second,
|
| 812 |
+
)
|
| 813 |
+
|
| 814 |
+
def millisecond(self) -> PandasLikeSeries:
|
| 815 |
+
return self.microsecond() // 1000
|
| 816 |
+
|
| 817 |
+
def microsecond(self) -> PandasLikeSeries:
|
| 818 |
+
if self._pandas_series._backend_version < (3, 0, 0) and "pyarrow" in str(
|
| 819 |
+
self._pandas_series._native_series.dtype
|
| 820 |
+
):
|
| 821 |
+
# crazy workaround for https://github.com/pandas-dev/pandas/issues/59154
|
| 822 |
+
import pyarrow.compute as pc # ignore-banned-import()
|
| 823 |
+
|
| 824 |
+
native_series = self._pandas_series._native_series
|
| 825 |
+
arr = native_series.array.__arrow_array__()
|
| 826 |
+
result_arr = pc.add(
|
| 827 |
+
pc.multiply(pc.millisecond(arr), 1000), pc.microsecond(arr)
|
| 828 |
+
)
|
| 829 |
+
result = native_series.__class__(
|
| 830 |
+
native_series.array.__class__(result_arr), name=native_series.name
|
| 831 |
+
)
|
| 832 |
+
return self._pandas_series._from_native_series(result)
|
| 833 |
+
|
| 834 |
+
return self._pandas_series._from_native_series(
|
| 835 |
+
self._pandas_series._native_series.dt.microsecond
|
| 836 |
+
)
|
| 837 |
+
|
| 838 |
+
def nanosecond(self) -> PandasLikeSeries:
|
| 839 |
+
return ( # type: ignore[no-any-return]
|
| 840 |
+
self.microsecond() * 1_000 + self._pandas_series._native_series.dt.nanosecond
|
| 841 |
+
)
|
| 842 |
+
|
| 843 |
+
def ordinal_day(self) -> PandasLikeSeries:
|
| 844 |
+
ser = self._pandas_series._native_series
|
| 845 |
+
year_start = ser.dt.year
|
| 846 |
+
result = (
|
| 847 |
+
ser.to_numpy().astype("datetime64[D]")
|
| 848 |
+
- (year_start.to_numpy() - 1970).astype("datetime64[Y]")
|
| 849 |
+
).astype("int32") + 1
|
| 850 |
+
dtype = "Int64[pyarrow]" if "pyarrow" in str(ser.dtype) else "int32"
|
| 851 |
+
return self._pandas_series._from_native_series(
|
| 852 |
+
self._pandas_series._native_series.__class__(
|
| 853 |
+
result, dtype=dtype, name=year_start.name
|
| 854 |
+
)
|
| 855 |
+
)
|
| 856 |
+
|
| 857 |
+
def total_minutes(self) -> PandasLikeSeries:
|
| 858 |
+
s = self._pandas_series._native_series.dt.total_seconds()
|
| 859 |
+
s_sign = (
|
| 860 |
+
2 * (s > 0).astype(int_dtype_mapper(s.dtype)) - 1
|
| 861 |
+
) # this calculates the sign of each series element
|
| 862 |
+
s_abs = s.abs() // 60
|
| 863 |
+
if ~s.isna().any():
|
| 864 |
+
s_abs = s_abs.astype(int_dtype_mapper(s.dtype))
|
| 865 |
+
return self._pandas_series._from_native_series(s_abs * s_sign)
|
| 866 |
+
|
| 867 |
+
def total_seconds(self) -> PandasLikeSeries:
|
| 868 |
+
s = self._pandas_series._native_series.dt.total_seconds()
|
| 869 |
+
s_sign = (
|
| 870 |
+
2 * (s > 0).astype(int_dtype_mapper(s.dtype)) - 1
|
| 871 |
+
) # this calculates the sign of each series element
|
| 872 |
+
s_abs = s.abs() // 1
|
| 873 |
+
if ~s.isna().any():
|
| 874 |
+
s_abs = s_abs.astype(int_dtype_mapper(s.dtype))
|
| 875 |
+
return self._pandas_series._from_native_series(s_abs * s_sign)
|
| 876 |
+
|
| 877 |
+
def total_milliseconds(self) -> PandasLikeSeries:
|
| 878 |
+
s = self._pandas_series._native_series.dt.total_seconds() * 1e3
|
| 879 |
+
s_sign = (
|
| 880 |
+
2 * (s > 0).astype(int_dtype_mapper(s.dtype)) - 1
|
| 881 |
+
) # this calculates the sign of each series element
|
| 882 |
+
s_abs = s.abs() // 1
|
| 883 |
+
if ~s.isna().any():
|
| 884 |
+
s_abs = s_abs.astype(int_dtype_mapper(s.dtype))
|
| 885 |
+
return self._pandas_series._from_native_series(s_abs * s_sign)
|
| 886 |
+
|
| 887 |
+
def total_microseconds(self) -> PandasLikeSeries:
|
| 888 |
+
s = self._pandas_series._native_series.dt.total_seconds() * 1e6
|
| 889 |
+
s_sign = (
|
| 890 |
+
2 * (s > 0).astype(int_dtype_mapper(s.dtype)) - 1
|
| 891 |
+
) # this calculates the sign of each series element
|
| 892 |
+
s_abs = s.abs() // 1
|
| 893 |
+
if ~s.isna().any():
|
| 894 |
+
s_abs = s_abs.astype(int_dtype_mapper(s.dtype))
|
| 895 |
+
return self._pandas_series._from_native_series(s_abs * s_sign)
|
| 896 |
+
|
| 897 |
+
def total_nanoseconds(self) -> PandasLikeSeries:
|
| 898 |
+
s = self._pandas_series._native_series.dt.total_seconds() * 1e9
|
| 899 |
+
s_sign = (
|
| 900 |
+
2 * (s > 0).astype(int_dtype_mapper(s.dtype)) - 1
|
| 901 |
+
) # this calculates the sign of each series element
|
| 902 |
+
s_abs = s.abs() // 1
|
| 903 |
+
if ~s.isna().any():
|
| 904 |
+
s_abs = s_abs.astype(int_dtype_mapper(s.dtype))
|
| 905 |
+
return self._pandas_series._from_native_series(s_abs * s_sign)
|
| 906 |
+
|
| 907 |
+
def to_string(self, format: str) -> PandasLikeSeries: # noqa: A002
|
| 908 |
+
# Polars' parser treats `'%.f'` as pandas does `'.%f'`
|
| 909 |
+
# PyArrow interprets `'%S'` as "seconds, plus fractional seconds"
|
| 910 |
+
# and doesn't support `%f`
|
| 911 |
+
if "pyarrow" not in str(self._pandas_series._native_series.dtype):
|
| 912 |
+
format = format.replace("%S%.f", "%S.%f")
|
| 913 |
+
else:
|
| 914 |
+
format = format.replace("%S.%f", "%S").replace("%S%.f", "%S")
|
| 915 |
+
return self._pandas_series._from_native_series(
|
| 916 |
+
self._pandas_series._native_series.dt.strftime(format)
|
| 917 |
+
)
|
parrot/lib/python3.10/site-packages/narwhals/_pandas_like/typing.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations # pragma: no cover
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING # pragma: no cover
|
| 4 |
+
from typing import Union # pragma: no cover
|
| 5 |
+
|
| 6 |
+
if TYPE_CHECKING:
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
if sys.version_info >= (3, 10):
|
| 10 |
+
from typing import TypeAlias
|
| 11 |
+
else:
|
| 12 |
+
from typing_extensions import TypeAlias
|
| 13 |
+
|
| 14 |
+
from narwhals._pandas_like.expr import PandasLikeExpr
|
| 15 |
+
from narwhals._pandas_like.series import PandasLikeSeries
|
| 16 |
+
|
| 17 |
+
IntoPandasLikeExpr: TypeAlias = Union[PandasLikeExpr, str, PandasLikeSeries]
|
parrot/lib/python3.10/site-packages/narwhals/_polars/__pycache__/series.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/narwhals/_polars/dataframe.py
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING
|
| 4 |
+
from typing import Any
|
| 5 |
+
|
| 6 |
+
from narwhals._polars.namespace import PolarsNamespace
|
| 7 |
+
from narwhals._polars.utils import convert_str_slice_to_int_slice
|
| 8 |
+
from narwhals._polars.utils import extract_args_kwargs
|
| 9 |
+
from narwhals._polars.utils import translate_dtype
|
| 10 |
+
from narwhals.dependencies import get_polars
|
| 11 |
+
from narwhals.utils import Implementation
|
| 12 |
+
from narwhals.utils import is_sequence_but_not_str
|
| 13 |
+
from narwhals.utils import parse_columns_to_drop
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
import numpy as np
|
| 17 |
+
from typing_extensions import Self
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class PolarsDataFrame:
|
| 21 |
+
def __init__(self, df: Any, *, backend_version: tuple[int, ...]) -> None:
|
| 22 |
+
self._native_frame = df
|
| 23 |
+
self._implementation = Implementation.POLARS
|
| 24 |
+
self._backend_version = backend_version
|
| 25 |
+
|
| 26 |
+
def __repr__(self) -> str: # pragma: no cover
|
| 27 |
+
return "PolarsDataFrame"
|
| 28 |
+
|
| 29 |
+
def __narwhals_dataframe__(self) -> Self:
|
| 30 |
+
return self
|
| 31 |
+
|
| 32 |
+
def __narwhals_namespace__(self) -> PolarsNamespace:
|
| 33 |
+
return PolarsNamespace(backend_version=self._backend_version)
|
| 34 |
+
|
| 35 |
+
def __native_namespace__(self) -> Any:
|
| 36 |
+
return get_polars()
|
| 37 |
+
|
| 38 |
+
def _from_native_frame(self, df: Any) -> Self:
|
| 39 |
+
return self.__class__(df, backend_version=self._backend_version)
|
| 40 |
+
|
| 41 |
+
def _from_native_object(self, obj: Any) -> Any:
|
| 42 |
+
pl = get_polars()
|
| 43 |
+
if isinstance(obj, pl.Series):
|
| 44 |
+
from narwhals._polars.series import PolarsSeries
|
| 45 |
+
|
| 46 |
+
return PolarsSeries(obj, backend_version=self._backend_version)
|
| 47 |
+
if isinstance(obj, pl.DataFrame):
|
| 48 |
+
return self._from_native_frame(obj)
|
| 49 |
+
# scalar
|
| 50 |
+
return obj
|
| 51 |
+
|
| 52 |
+
def __getattr__(self, attr: str) -> Any:
|
| 53 |
+
if attr == "collect": # pragma: no cover
|
| 54 |
+
raise AttributeError
|
| 55 |
+
|
| 56 |
+
def func(*args: Any, **kwargs: Any) -> Any:
|
| 57 |
+
args, kwargs = extract_args_kwargs(args, kwargs) # type: ignore[assignment]
|
| 58 |
+
return self._from_native_object(
|
| 59 |
+
getattr(self._native_frame, attr)(*args, **kwargs)
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
return func
|
| 63 |
+
|
| 64 |
+
def __array__(self, dtype: Any | None = None, copy: bool | None = None) -> np.ndarray:
|
| 65 |
+
if self._backend_version < (0, 20, 28) and copy is not None: # pragma: no cover
|
| 66 |
+
msg = "`copy` in `__array__` is only supported for Polars>=0.20.28"
|
| 67 |
+
raise NotImplementedError(msg)
|
| 68 |
+
if self._backend_version < (0, 20, 28): # pragma: no cover
|
| 69 |
+
return self._native_frame.__array__(dtype)
|
| 70 |
+
return self._native_frame.__array__(dtype)
|
| 71 |
+
|
| 72 |
+
@property
|
| 73 |
+
def schema(self) -> dict[str, Any]:
|
| 74 |
+
schema = self._native_frame.schema
|
| 75 |
+
return {name: translate_dtype(dtype) for name, dtype in schema.items()}
|
| 76 |
+
|
| 77 |
+
def collect_schema(self) -> dict[str, Any]:
|
| 78 |
+
if self._backend_version < (1,): # pragma: no cover
|
| 79 |
+
schema = self._native_frame.schema
|
| 80 |
+
else:
|
| 81 |
+
schema = dict(self._native_frame.collect_schema())
|
| 82 |
+
return {name: translate_dtype(dtype) for name, dtype in schema.items()}
|
| 83 |
+
|
| 84 |
+
@property
|
| 85 |
+
def shape(self) -> tuple[int, int]:
|
| 86 |
+
return self._native_frame.shape # type: ignore[no-any-return]
|
| 87 |
+
|
| 88 |
+
def __getitem__(self, item: Any) -> Any:
|
| 89 |
+
if self._backend_version > (0, 20, 30):
|
| 90 |
+
return self._from_native_object(self._native_frame.__getitem__(item))
|
| 91 |
+
else: # pragma: no cover
|
| 92 |
+
# TODO(marco): we can delete this branch after Polars==0.20.30 becomes the minimum
|
| 93 |
+
# Polars version we support
|
| 94 |
+
if isinstance(item, tuple):
|
| 95 |
+
item = tuple(list(i) if is_sequence_but_not_str(i) else i for i in item)
|
| 96 |
+
|
| 97 |
+
columns = self.columns
|
| 98 |
+
if isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], slice):
|
| 99 |
+
if isinstance(item[1].start, str) or isinstance(item[1].stop, str):
|
| 100 |
+
start, stop, step = convert_str_slice_to_int_slice(item[1], columns)
|
| 101 |
+
return self._from_native_frame(
|
| 102 |
+
self._native_frame.select(columns[start:stop:step]).__getitem__(
|
| 103 |
+
item[0]
|
| 104 |
+
)
|
| 105 |
+
)
|
| 106 |
+
if isinstance(item[1].start, int) or isinstance(item[1].stop, int):
|
| 107 |
+
return self._from_native_frame(
|
| 108 |
+
self._native_frame.select(
|
| 109 |
+
columns[item[1].start : item[1].stop : item[1].step]
|
| 110 |
+
).__getitem__(item[0])
|
| 111 |
+
)
|
| 112 |
+
msg = f"Expected slice of integers or strings, got: {type(item[1])}" # pragma: no cover
|
| 113 |
+
raise TypeError(msg) # pragma: no cover
|
| 114 |
+
pl = get_polars()
|
| 115 |
+
if (
|
| 116 |
+
isinstance(item, tuple)
|
| 117 |
+
and (len(item) == 2)
|
| 118 |
+
and is_sequence_but_not_str(item[1])
|
| 119 |
+
and (len(item[1]) == 0)
|
| 120 |
+
):
|
| 121 |
+
result = self._native_frame.select(item[1])
|
| 122 |
+
elif isinstance(item, slice) and (
|
| 123 |
+
isinstance(item.start, str) or isinstance(item.stop, str)
|
| 124 |
+
):
|
| 125 |
+
start, stop, step = convert_str_slice_to_int_slice(item, columns)
|
| 126 |
+
return self._from_native_frame(
|
| 127 |
+
self._native_frame.select(columns[start:stop:step])
|
| 128 |
+
)
|
| 129 |
+
elif is_sequence_but_not_str(item) and (len(item) == 0):
|
| 130 |
+
result = self._native_frame.slice(0, 0)
|
| 131 |
+
else:
|
| 132 |
+
result = self._native_frame.__getitem__(item)
|
| 133 |
+
if isinstance(result, pl.Series):
|
| 134 |
+
from narwhals._polars.series import PolarsSeries
|
| 135 |
+
|
| 136 |
+
return PolarsSeries(result, backend_version=self._backend_version)
|
| 137 |
+
return self._from_native_object(result)
|
| 138 |
+
|
| 139 |
+
def get_column(self, name: str) -> Any:
|
| 140 |
+
from narwhals._polars.series import PolarsSeries
|
| 141 |
+
|
| 142 |
+
return PolarsSeries(
|
| 143 |
+
self._native_frame.get_column(name), backend_version=self._backend_version
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
def is_empty(self) -> bool:
|
| 147 |
+
return len(self._native_frame) == 0
|
| 148 |
+
|
| 149 |
+
@property
|
| 150 |
+
def columns(self) -> list[str]:
|
| 151 |
+
return self._native_frame.columns # type: ignore[no-any-return]
|
| 152 |
+
|
| 153 |
+
def lazy(self) -> PolarsLazyFrame:
|
| 154 |
+
return PolarsLazyFrame(
|
| 155 |
+
self._native_frame.lazy(), backend_version=self._backend_version
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
def to_dict(self, *, as_series: bool) -> Any:
|
| 159 |
+
df = self._native_frame
|
| 160 |
+
|
| 161 |
+
if as_series:
|
| 162 |
+
from narwhals._polars.series import PolarsSeries
|
| 163 |
+
|
| 164 |
+
return {
|
| 165 |
+
name: PolarsSeries(col, backend_version=self._backend_version)
|
| 166 |
+
for name, col in df.to_dict(as_series=True).items()
|
| 167 |
+
}
|
| 168 |
+
else:
|
| 169 |
+
return df.to_dict(as_series=False)
|
| 170 |
+
|
| 171 |
+
def group_by(self, *by: str) -> Any:
|
| 172 |
+
from narwhals._polars.group_by import PolarsGroupBy
|
| 173 |
+
|
| 174 |
+
return PolarsGroupBy(self, list(by))
|
| 175 |
+
|
| 176 |
+
def with_row_index(self, name: str) -> Any:
|
| 177 |
+
if self._backend_version < (0, 20, 4): # pragma: no cover
|
| 178 |
+
return self._from_native_frame(self._native_frame.with_row_count(name))
|
| 179 |
+
return self._from_native_frame(self._native_frame.with_row_index(name))
|
| 180 |
+
|
| 181 |
+
def drop(self: Self, columns: list[str], strict: bool) -> Self: # noqa: FBT001
|
| 182 |
+
if self._backend_version < (1, 0, 0): # pragma: no cover
|
| 183 |
+
to_drop = parse_columns_to_drop(
|
| 184 |
+
compliant_frame=self, columns=columns, strict=strict
|
| 185 |
+
)
|
| 186 |
+
return self._from_native_frame(self._native_frame.drop(to_drop))
|
| 187 |
+
return self._from_native_frame(self._native_frame.drop(columns, strict=strict))
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
class PolarsLazyFrame:
|
| 191 |
+
def __init__(self, df: Any, *, backend_version: tuple[int, ...]) -> None:
|
| 192 |
+
self._native_frame = df
|
| 193 |
+
self._backend_version = backend_version
|
| 194 |
+
|
| 195 |
+
def __repr__(self) -> str: # pragma: no cover
|
| 196 |
+
return "PolarsLazyFrame"
|
| 197 |
+
|
| 198 |
+
def __narwhals_lazyframe__(self) -> Self:
|
| 199 |
+
return self
|
| 200 |
+
|
| 201 |
+
def __narwhals_namespace__(self) -> PolarsNamespace:
|
| 202 |
+
return PolarsNamespace(backend_version=self._backend_version)
|
| 203 |
+
|
| 204 |
+
def __native_namespace__(self) -> Any: # pragma: no cover
|
| 205 |
+
return get_polars()
|
| 206 |
+
|
| 207 |
+
def _from_native_frame(self, df: Any) -> Self:
|
| 208 |
+
return self.__class__(df, backend_version=self._backend_version)
|
| 209 |
+
|
| 210 |
+
def __getattr__(self, attr: str) -> Any:
|
| 211 |
+
def func(*args: Any, **kwargs: Any) -> Any:
|
| 212 |
+
args, kwargs = extract_args_kwargs(args, kwargs) # type: ignore[assignment]
|
| 213 |
+
return self._from_native_frame(
|
| 214 |
+
getattr(self._native_frame, attr)(*args, **kwargs)
|
| 215 |
+
)
|
| 216 |
+
|
| 217 |
+
return func
|
| 218 |
+
|
| 219 |
+
@property
|
| 220 |
+
def columns(self) -> list[str]:
|
| 221 |
+
return self._native_frame.columns # type: ignore[no-any-return]
|
| 222 |
+
|
| 223 |
+
@property
|
| 224 |
+
def schema(self) -> dict[str, Any]:
|
| 225 |
+
schema = self._native_frame.schema
|
| 226 |
+
return {name: translate_dtype(dtype) for name, dtype in schema.items()}
|
| 227 |
+
|
| 228 |
+
def collect_schema(self) -> dict[str, Any]:
|
| 229 |
+
if self._backend_version < (1,): # pragma: no cover
|
| 230 |
+
schema = self._native_frame.schema
|
| 231 |
+
else:
|
| 232 |
+
schema = dict(self._native_frame.collect_schema())
|
| 233 |
+
return {name: translate_dtype(dtype) for name, dtype in schema.items()}
|
| 234 |
+
|
| 235 |
+
def collect(self) -> PolarsDataFrame:
|
| 236 |
+
return PolarsDataFrame(
|
| 237 |
+
self._native_frame.collect(), backend_version=self._backend_version
|
| 238 |
+
)
|
| 239 |
+
|
| 240 |
+
def group_by(self, *by: str) -> Any:
|
| 241 |
+
from narwhals._polars.group_by import PolarsLazyGroupBy
|
| 242 |
+
|
| 243 |
+
return PolarsLazyGroupBy(self, list(by))
|
| 244 |
+
|
| 245 |
+
def with_row_index(self, name: str) -> Any:
|
| 246 |
+
if self._backend_version < (0, 20, 4): # pragma: no cover
|
| 247 |
+
return self._from_native_frame(self._native_frame.with_row_count(name))
|
| 248 |
+
return self._from_native_frame(self._native_frame.with_row_index(name))
|
| 249 |
+
|
| 250 |
+
def drop(self: Self, columns: list[str], strict: bool) -> Self: # noqa: FBT001
|
| 251 |
+
if self._backend_version < (1, 0, 0): # pragma: no cover
|
| 252 |
+
return self._from_native_frame(self._native_frame.drop(columns))
|
| 253 |
+
return self._from_native_frame(self._native_frame.drop(columns, strict=strict))
|
parrot/lib/python3.10/site-packages/narwhals/stable/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from narwhals.stable import v1
|
| 2 |
+
|
| 3 |
+
__all__ = ["v1"]
|
parrot/lib/python3.10/site-packages/narwhals/stable/__pycache__/v1.cpython-310.pyc
ADDED
|
Binary file (58.8 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/scipy/sparse/tests/data/csc_py2.npz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:bac27f1a3eb1fdd102dae39b7dd61ce83e82f096388e344e14285071984d01fa
|
| 3 |
+
size 846
|
parrot/lib/python3.10/site-packages/scipy/sparse/tests/data/csc_py3.npz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6b1b84315c7077417e720512d086a5a6217c2875b818d27704ae9b7237c69dfe
|
| 3 |
+
size 851
|
videollama2/lib/python3.10/site-packages/contourpy/__init__.py
ADDED
|
@@ -0,0 +1,285 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING
|
| 4 |
+
|
| 5 |
+
import numpy as np
|
| 6 |
+
|
| 7 |
+
from contourpy._contourpy import (
|
| 8 |
+
ContourGenerator,
|
| 9 |
+
FillType,
|
| 10 |
+
LineType,
|
| 11 |
+
Mpl2005ContourGenerator,
|
| 12 |
+
Mpl2014ContourGenerator,
|
| 13 |
+
SerialContourGenerator,
|
| 14 |
+
ThreadedContourGenerator,
|
| 15 |
+
ZInterp,
|
| 16 |
+
max_threads,
|
| 17 |
+
)
|
| 18 |
+
from contourpy._version import __version__
|
| 19 |
+
from contourpy.chunk import calc_chunk_sizes
|
| 20 |
+
from contourpy.convert import (
|
| 21 |
+
convert_filled,
|
| 22 |
+
convert_lines,
|
| 23 |
+
convert_multi_filled,
|
| 24 |
+
convert_multi_lines,
|
| 25 |
+
)
|
| 26 |
+
from contourpy.dechunk import (
|
| 27 |
+
dechunk_filled,
|
| 28 |
+
dechunk_lines,
|
| 29 |
+
dechunk_multi_filled,
|
| 30 |
+
dechunk_multi_lines,
|
| 31 |
+
)
|
| 32 |
+
from contourpy.enum_util import as_fill_type, as_line_type, as_z_interp
|
| 33 |
+
|
| 34 |
+
if TYPE_CHECKING:
|
| 35 |
+
from typing import Any
|
| 36 |
+
|
| 37 |
+
from numpy.typing import ArrayLike
|
| 38 |
+
|
| 39 |
+
from ._contourpy import CoordinateArray, MaskArray
|
| 40 |
+
|
| 41 |
+
__all__ = [
|
| 42 |
+
"__version__",
|
| 43 |
+
"contour_generator",
|
| 44 |
+
"convert_filled",
|
| 45 |
+
"convert_lines",
|
| 46 |
+
"convert_multi_filled",
|
| 47 |
+
"convert_multi_lines",
|
| 48 |
+
"dechunk_filled",
|
| 49 |
+
"dechunk_lines",
|
| 50 |
+
"dechunk_multi_filled",
|
| 51 |
+
"dechunk_multi_lines",
|
| 52 |
+
"max_threads",
|
| 53 |
+
"FillType",
|
| 54 |
+
"LineType",
|
| 55 |
+
"ContourGenerator",
|
| 56 |
+
"Mpl2005ContourGenerator",
|
| 57 |
+
"Mpl2014ContourGenerator",
|
| 58 |
+
"SerialContourGenerator",
|
| 59 |
+
"ThreadedContourGenerator",
|
| 60 |
+
"ZInterp",
|
| 61 |
+
]
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
# Simple mapping of algorithm name to class name.
|
| 65 |
+
_class_lookup: dict[str, type[ContourGenerator]] = {
|
| 66 |
+
"mpl2005": Mpl2005ContourGenerator,
|
| 67 |
+
"mpl2014": Mpl2014ContourGenerator,
|
| 68 |
+
"serial": SerialContourGenerator,
|
| 69 |
+
"threaded": ThreadedContourGenerator,
|
| 70 |
+
}
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def _remove_z_mask(
|
| 74 |
+
z: ArrayLike | np.ma.MaskedArray[Any, Any] | None,
|
| 75 |
+
) -> tuple[CoordinateArray, MaskArray | None]:
|
| 76 |
+
# Preserve mask if present.
|
| 77 |
+
z_array = np.ma.asarray(z, dtype=np.float64) # type: ignore[no-untyped-call]
|
| 78 |
+
z_masked = np.ma.masked_invalid(z_array, copy=False) # type: ignore[no-untyped-call]
|
| 79 |
+
|
| 80 |
+
if np.ma.is_masked(z_masked): # type: ignore[no-untyped-call]
|
| 81 |
+
mask = np.ma.getmask(z_masked) # type: ignore[no-untyped-call]
|
| 82 |
+
else:
|
| 83 |
+
mask = None
|
| 84 |
+
|
| 85 |
+
return np.ma.getdata(z_masked), mask # type: ignore[no-untyped-call]
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def contour_generator(
|
| 89 |
+
x: ArrayLike | None = None,
|
| 90 |
+
y: ArrayLike | None = None,
|
| 91 |
+
z: ArrayLike | np.ma.MaskedArray[Any, Any] | None = None,
|
| 92 |
+
*,
|
| 93 |
+
name: str = "serial",
|
| 94 |
+
corner_mask: bool | None = None,
|
| 95 |
+
line_type: LineType | str | None = None,
|
| 96 |
+
fill_type: FillType | str | None = None,
|
| 97 |
+
chunk_size: int | tuple[int, int] | None = None,
|
| 98 |
+
chunk_count: int | tuple[int, int] | None = None,
|
| 99 |
+
total_chunk_count: int | None = None,
|
| 100 |
+
quad_as_tri: bool = False,
|
| 101 |
+
z_interp: ZInterp | str | None = ZInterp.Linear,
|
| 102 |
+
thread_count: int = 0,
|
| 103 |
+
) -> ContourGenerator:
|
| 104 |
+
"""Create and return a :class:`~.ContourGenerator` object.
|
| 105 |
+
|
| 106 |
+
The class and properties of the returned :class:`~.ContourGenerator` are determined by the
|
| 107 |
+
function arguments, with sensible defaults.
|
| 108 |
+
|
| 109 |
+
Args:
|
| 110 |
+
x (array-like of shape (ny, nx) or (nx,), optional): The x-coordinates of the ``z`` values.
|
| 111 |
+
May be 2D with the same shape as ``z.shape``, or 1D with length ``nx = z.shape[1]``.
|
| 112 |
+
If not specified are assumed to be ``np.arange(nx)``. Must be ordered monotonically.
|
| 113 |
+
y (array-like of shape (ny, nx) or (ny,), optional): The y-coordinates of the ``z`` values.
|
| 114 |
+
May be 2D with the same shape as ``z.shape``, or 1D with length ``ny = z.shape[0]``.
|
| 115 |
+
If not specified are assumed to be ``np.arange(ny)``. Must be ordered monotonically.
|
| 116 |
+
z (array-like of shape (ny, nx), may be a masked array): The 2D gridded values to calculate
|
| 117 |
+
the contours of. May be a masked array, and any invalid values (``np.inf`` or
|
| 118 |
+
``np.nan``) will also be masked out.
|
| 119 |
+
name (str): Algorithm name, one of ``"serial"``, ``"threaded"``, ``"mpl2005"`` or
|
| 120 |
+
``"mpl2014"``, default ``"serial"``.
|
| 121 |
+
corner_mask (bool, optional): Enable/disable corner masking, which only has an effect if
|
| 122 |
+
``z`` is a masked array. If ``False``, any quad touching a masked point is masked out.
|
| 123 |
+
If ``True``, only the triangular corners of quads nearest these points are always masked
|
| 124 |
+
out, other triangular corners comprising three unmasked points are contoured as usual.
|
| 125 |
+
If not specified, uses the default provided by the algorithm ``name``.
|
| 126 |
+
line_type (LineType or str, optional): The format of contour line data returned from calls
|
| 127 |
+
to :meth:`~.ContourGenerator.lines`, specified either as a :class:`~.LineType` or its
|
| 128 |
+
string equivalent such as ``"SeparateCode"``.
|
| 129 |
+
If not specified, uses the default provided by the algorithm ``name``.
|
| 130 |
+
The relationship between the :class:`~.LineType` enum and the data format returned from
|
| 131 |
+
:meth:`~.ContourGenerator.lines` is explained at :ref:`line_type`.
|
| 132 |
+
fill_type (FillType or str, optional): The format of filled contour data returned from calls
|
| 133 |
+
to :meth:`~.ContourGenerator.filled`, specified either as a :class:`~.FillType` or its
|
| 134 |
+
string equivalent such as ``"OuterOffset"``.
|
| 135 |
+
If not specified, uses the default provided by the algorithm ``name``.
|
| 136 |
+
The relationship between the :class:`~.FillType` enum and the data format returned from
|
| 137 |
+
:meth:`~.ContourGenerator.filled` is explained at :ref:`fill_type`.
|
| 138 |
+
chunk_size (int or tuple(int, int), optional): Chunk size in (y, x) directions, or the same
|
| 139 |
+
size in both directions if only one value is specified.
|
| 140 |
+
chunk_count (int or tuple(int, int), optional): Chunk count in (y, x) directions, or the
|
| 141 |
+
same count in both directions if only one value is specified.
|
| 142 |
+
total_chunk_count (int, optional): Total number of chunks.
|
| 143 |
+
quad_as_tri (bool): Enable/disable treating quads as 4 triangles, default ``False``.
|
| 144 |
+
If ``False``, a contour line within a quad is a straight line between points on two of
|
| 145 |
+
its edges. If ``True``, each full quad is divided into 4 triangles using a virtual point
|
| 146 |
+
at the centre (mean x, y of the corner points) and a contour line is piecewise linear
|
| 147 |
+
within those triangles. Corner-masked triangles are not affected by this setting, only
|
| 148 |
+
full unmasked quads.
|
| 149 |
+
z_interp (ZInterp or str, optional): How to interpolate ``z`` values when determining where
|
| 150 |
+
contour lines intersect the edges of quads and the ``z`` values of the central points of
|
| 151 |
+
quads, specified either as a :class:`~contourpy.ZInterp` or its string equivalent such
|
| 152 |
+
as ``"Log"``. Default is ``ZInterp.Linear``.
|
| 153 |
+
thread_count (int): Number of threads to use for contour calculation, default 0. Threads can
|
| 154 |
+
only be used with an algorithm ``name`` that supports threads (currently only
|
| 155 |
+
``name="threaded"``) and there must be at least the same number of chunks as threads.
|
| 156 |
+
If ``thread_count=0`` and ``name="threaded"`` then it uses the maximum number of threads
|
| 157 |
+
as determined by the C++11 call ``std::thread::hardware_concurrency()``. If ``name`` is
|
| 158 |
+
something other than ``"threaded"`` then the ``thread_count`` will be set to ``1``.
|
| 159 |
+
|
| 160 |
+
Return:
|
| 161 |
+
:class:`~.ContourGenerator`.
|
| 162 |
+
|
| 163 |
+
Note:
|
| 164 |
+
A maximum of one of ``chunk_size``, ``chunk_count`` and ``total_chunk_count`` may be
|
| 165 |
+
specified.
|
| 166 |
+
|
| 167 |
+
Warning:
|
| 168 |
+
The ``name="mpl2005"`` algorithm does not implement chunking for contour lines.
|
| 169 |
+
"""
|
| 170 |
+
x = np.asarray(x, dtype=np.float64)
|
| 171 |
+
y = np.asarray(y, dtype=np.float64)
|
| 172 |
+
z, mask = _remove_z_mask(z)
|
| 173 |
+
|
| 174 |
+
# Check arguments: z.
|
| 175 |
+
if z.ndim != 2:
|
| 176 |
+
raise TypeError(f"Input z must be 2D, not {z.ndim}D")
|
| 177 |
+
|
| 178 |
+
if z.shape[0] < 2 or z.shape[1] < 2:
|
| 179 |
+
raise TypeError(f"Input z must be at least a (2, 2) shaped array, but has shape {z.shape}")
|
| 180 |
+
|
| 181 |
+
ny, nx = z.shape
|
| 182 |
+
|
| 183 |
+
# Check arguments: x and y.
|
| 184 |
+
if x.ndim != y.ndim:
|
| 185 |
+
raise TypeError(f"Number of dimensions of x ({x.ndim}) and y ({y.ndim}) do not match")
|
| 186 |
+
|
| 187 |
+
if x.ndim == 0:
|
| 188 |
+
x = np.arange(nx, dtype=np.float64)
|
| 189 |
+
y = np.arange(ny, dtype=np.float64)
|
| 190 |
+
x, y = np.meshgrid(x, y)
|
| 191 |
+
elif x.ndim == 1:
|
| 192 |
+
if len(x) != nx:
|
| 193 |
+
raise TypeError(f"Length of x ({len(x)}) must match number of columns in z ({nx})")
|
| 194 |
+
if len(y) != ny:
|
| 195 |
+
raise TypeError(f"Length of y ({len(y)}) must match number of rows in z ({ny})")
|
| 196 |
+
x, y = np.meshgrid(x, y)
|
| 197 |
+
elif x.ndim == 2:
|
| 198 |
+
if x.shape != z.shape:
|
| 199 |
+
raise TypeError(f"Shapes of x {x.shape} and z {z.shape} do not match")
|
| 200 |
+
if y.shape != z.shape:
|
| 201 |
+
raise TypeError(f"Shapes of y {y.shape} and z {z.shape} do not match")
|
| 202 |
+
else:
|
| 203 |
+
raise TypeError(f"Inputs x and y must be None, 1D or 2D, not {x.ndim}D")
|
| 204 |
+
|
| 205 |
+
# Check mask shape just in case.
|
| 206 |
+
if mask is not None and mask.shape != z.shape:
|
| 207 |
+
raise ValueError("If mask is set it must be a 2D array with the same shape as z")
|
| 208 |
+
|
| 209 |
+
# Check arguments: name.
|
| 210 |
+
if name not in _class_lookup:
|
| 211 |
+
raise ValueError(f"Unrecognised contour generator name: {name}")
|
| 212 |
+
|
| 213 |
+
# Check arguments: chunk_size, chunk_count and total_chunk_count.
|
| 214 |
+
y_chunk_size, x_chunk_size = calc_chunk_sizes(
|
| 215 |
+
chunk_size, chunk_count, total_chunk_count, ny, nx)
|
| 216 |
+
|
| 217 |
+
cls = _class_lookup[name]
|
| 218 |
+
|
| 219 |
+
# Check arguments: corner_mask.
|
| 220 |
+
if corner_mask is None:
|
| 221 |
+
# Set it to default, which is True if the algorithm supports it.
|
| 222 |
+
corner_mask = cls.supports_corner_mask()
|
| 223 |
+
elif corner_mask and not cls.supports_corner_mask():
|
| 224 |
+
raise ValueError(f"{name} contour generator does not support corner_mask=True")
|
| 225 |
+
|
| 226 |
+
# Check arguments: line_type.
|
| 227 |
+
if line_type is None:
|
| 228 |
+
line_type = cls.default_line_type
|
| 229 |
+
else:
|
| 230 |
+
line_type = as_line_type(line_type)
|
| 231 |
+
|
| 232 |
+
if not cls.supports_line_type(line_type):
|
| 233 |
+
raise ValueError(f"{name} contour generator does not support line_type {line_type}")
|
| 234 |
+
|
| 235 |
+
# Check arguments: fill_type.
|
| 236 |
+
if fill_type is None:
|
| 237 |
+
fill_type = cls.default_fill_type
|
| 238 |
+
else:
|
| 239 |
+
fill_type = as_fill_type(fill_type)
|
| 240 |
+
|
| 241 |
+
if not cls.supports_fill_type(fill_type):
|
| 242 |
+
raise ValueError(f"{name} contour generator does not support fill_type {fill_type}")
|
| 243 |
+
|
| 244 |
+
# Check arguments: quad_as_tri.
|
| 245 |
+
if quad_as_tri and not cls.supports_quad_as_tri():
|
| 246 |
+
raise ValueError(f"{name} contour generator does not support quad_as_tri=True")
|
| 247 |
+
|
| 248 |
+
# Check arguments: z_interp.
|
| 249 |
+
if z_interp is None:
|
| 250 |
+
z_interp = ZInterp.Linear
|
| 251 |
+
else:
|
| 252 |
+
z_interp = as_z_interp(z_interp)
|
| 253 |
+
|
| 254 |
+
if z_interp != ZInterp.Linear and not cls.supports_z_interp():
|
| 255 |
+
raise ValueError(f"{name} contour generator does not support z_interp {z_interp}")
|
| 256 |
+
|
| 257 |
+
# Check arguments: thread_count.
|
| 258 |
+
if thread_count not in (0, 1) and not cls.supports_threads():
|
| 259 |
+
raise ValueError(f"{name} contour generator does not support thread_count {thread_count}")
|
| 260 |
+
|
| 261 |
+
# Prepare args and kwargs for contour generator constructor.
|
| 262 |
+
args = [x, y, z, mask]
|
| 263 |
+
kwargs: dict[str, int | bool | LineType | FillType | ZInterp] = {
|
| 264 |
+
"x_chunk_size": x_chunk_size,
|
| 265 |
+
"y_chunk_size": y_chunk_size,
|
| 266 |
+
}
|
| 267 |
+
|
| 268 |
+
if name not in ("mpl2005", "mpl2014"):
|
| 269 |
+
kwargs["line_type"] = line_type
|
| 270 |
+
kwargs["fill_type"] = fill_type
|
| 271 |
+
|
| 272 |
+
if cls.supports_corner_mask():
|
| 273 |
+
kwargs["corner_mask"] = corner_mask
|
| 274 |
+
|
| 275 |
+
if cls.supports_quad_as_tri():
|
| 276 |
+
kwargs["quad_as_tri"] = quad_as_tri
|
| 277 |
+
|
| 278 |
+
if cls.supports_z_interp():
|
| 279 |
+
kwargs["z_interp"] = z_interp
|
| 280 |
+
|
| 281 |
+
if cls.supports_threads():
|
| 282 |
+
kwargs["thread_count"] = thread_count
|
| 283 |
+
|
| 284 |
+
# Create contour generator.
|
| 285 |
+
return cls(*args, **kwargs)
|
videollama2/lib/python3.10/site-packages/contourpy/__pycache__/array.cpython-310.pyc
ADDED
|
Binary file (9.57 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/__pycache__/convert.cpython-310.pyc
ADDED
|
Binary file (16.7 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/__pycache__/typecheck.cpython-310.pyc
ADDED
|
Binary file (6.58 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/__pycache__/types.cpython-310.pyc
ADDED
|
Binary file (377 Bytes). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/_contourpy.cpython-310-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:fe9c88f2f427be9957274766f567919180820ca775e77ab75173142f2501ce4f
|
| 3 |
+
size 844208
|
videollama2/lib/python3.10/site-packages/contourpy/array.py
ADDED
|
@@ -0,0 +1,261 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from itertools import chain
|
| 4 |
+
from typing import TYPE_CHECKING
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
|
| 8 |
+
from contourpy.typecheck import check_code_array, check_offset_array, check_point_array
|
| 9 |
+
from contourpy.types import CLOSEPOLY, LINETO, MOVETO, code_dtype, offset_dtype, point_dtype
|
| 10 |
+
|
| 11 |
+
if TYPE_CHECKING:
|
| 12 |
+
import contourpy._contourpy as cpy
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def codes_from_offsets(offsets: cpy.OffsetArray) -> cpy.CodeArray:
|
| 16 |
+
"""Determine codes from offsets, assuming they all correspond to closed polygons.
|
| 17 |
+
"""
|
| 18 |
+
check_offset_array(offsets)
|
| 19 |
+
|
| 20 |
+
n = offsets[-1]
|
| 21 |
+
codes = np.full(n, LINETO, dtype=code_dtype)
|
| 22 |
+
codes[offsets[:-1]] = MOVETO
|
| 23 |
+
codes[offsets[1:] - 1] = CLOSEPOLY
|
| 24 |
+
return codes
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def codes_from_offsets_and_points(
|
| 28 |
+
offsets: cpy.OffsetArray,
|
| 29 |
+
points: cpy.PointArray,
|
| 30 |
+
) -> cpy.CodeArray:
|
| 31 |
+
"""Determine codes from offsets and points, using the equality of the start and end points of
|
| 32 |
+
each line to determine if lines are closed or not.
|
| 33 |
+
"""
|
| 34 |
+
check_offset_array(offsets)
|
| 35 |
+
check_point_array(points)
|
| 36 |
+
|
| 37 |
+
codes = np.full(len(points), LINETO, dtype=code_dtype)
|
| 38 |
+
codes[offsets[:-1]] = MOVETO
|
| 39 |
+
|
| 40 |
+
end_offsets = offsets[1:] - 1
|
| 41 |
+
closed = np.all(points[offsets[:-1]] == points[end_offsets], axis=1)
|
| 42 |
+
codes[end_offsets[closed]] = CLOSEPOLY
|
| 43 |
+
|
| 44 |
+
return codes
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def codes_from_points(points: cpy.PointArray) -> cpy.CodeArray:
|
| 48 |
+
"""Determine codes for a single line, using the equality of the start and end points to
|
| 49 |
+
determine if the line is closed or not.
|
| 50 |
+
"""
|
| 51 |
+
check_point_array(points)
|
| 52 |
+
|
| 53 |
+
n = len(points)
|
| 54 |
+
codes = np.full(n, LINETO, dtype=code_dtype)
|
| 55 |
+
codes[0] = MOVETO
|
| 56 |
+
if np.all(points[0] == points[-1]):
|
| 57 |
+
codes[-1] = CLOSEPOLY
|
| 58 |
+
return codes
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def concat_codes(list_of_codes: list[cpy.CodeArray]) -> cpy.CodeArray:
|
| 62 |
+
"""Concatenate a list of codes arrays into a single code array.
|
| 63 |
+
"""
|
| 64 |
+
if not list_of_codes:
|
| 65 |
+
raise ValueError("Empty list passed to concat_codes")
|
| 66 |
+
|
| 67 |
+
return np.concatenate(list_of_codes, dtype=code_dtype)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def concat_codes_or_none(list_of_codes_or_none: list[cpy.CodeArray | None]) -> cpy.CodeArray | None:
|
| 71 |
+
"""Concatenate a list of codes arrays or None into a single code array or None.
|
| 72 |
+
"""
|
| 73 |
+
list_of_codes = [codes for codes in list_of_codes_or_none if codes is not None]
|
| 74 |
+
if list_of_codes:
|
| 75 |
+
return concat_codes(list_of_codes)
|
| 76 |
+
else:
|
| 77 |
+
return None
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def concat_offsets(list_of_offsets: list[cpy.OffsetArray]) -> cpy.OffsetArray:
|
| 81 |
+
"""Concatenate a list of offsets arrays into a single offset array.
|
| 82 |
+
"""
|
| 83 |
+
if not list_of_offsets:
|
| 84 |
+
raise ValueError("Empty list passed to concat_offsets")
|
| 85 |
+
|
| 86 |
+
n = len(list_of_offsets)
|
| 87 |
+
cumulative = np.cumsum([offsets[-1] for offsets in list_of_offsets], dtype=offset_dtype)
|
| 88 |
+
ret: cpy.OffsetArray = np.concatenate(
|
| 89 |
+
(list_of_offsets[0], *(list_of_offsets[i+1][1:] + cumulative[i] for i in range(n-1))),
|
| 90 |
+
dtype=offset_dtype,
|
| 91 |
+
)
|
| 92 |
+
return ret
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
def concat_offsets_or_none(
|
| 96 |
+
list_of_offsets_or_none: list[cpy.OffsetArray | None],
|
| 97 |
+
) -> cpy.OffsetArray | None:
|
| 98 |
+
"""Concatenate a list of offsets arrays or None into a single offset array or None.
|
| 99 |
+
"""
|
| 100 |
+
list_of_offsets = [offsets for offsets in list_of_offsets_or_none if offsets is not None]
|
| 101 |
+
if list_of_offsets:
|
| 102 |
+
return concat_offsets(list_of_offsets)
|
| 103 |
+
else:
|
| 104 |
+
return None
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def concat_points(list_of_points: list[cpy.PointArray]) -> cpy.PointArray:
|
| 108 |
+
"""Concatenate a list of point arrays into a single point array.
|
| 109 |
+
"""
|
| 110 |
+
if not list_of_points:
|
| 111 |
+
raise ValueError("Empty list passed to concat_points")
|
| 112 |
+
|
| 113 |
+
return np.concatenate(list_of_points, dtype=point_dtype)
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def concat_points_or_none(
|
| 117 |
+
list_of_points_or_none: list[cpy.PointArray | None],
|
| 118 |
+
) -> cpy.PointArray | None:
|
| 119 |
+
"""Concatenate a list of point arrays or None into a single point array or None.
|
| 120 |
+
"""
|
| 121 |
+
list_of_points = [points for points in list_of_points_or_none if points is not None]
|
| 122 |
+
if list_of_points:
|
| 123 |
+
return concat_points(list_of_points)
|
| 124 |
+
else:
|
| 125 |
+
return None
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def concat_points_or_none_with_nan(
|
| 129 |
+
list_of_points_or_none: list[cpy.PointArray | None],
|
| 130 |
+
) -> cpy.PointArray | None:
|
| 131 |
+
"""Concatenate a list of points or None into a single point array or None, with NaNs used to
|
| 132 |
+
separate each line.
|
| 133 |
+
"""
|
| 134 |
+
list_of_points = [points for points in list_of_points_or_none if points is not None]
|
| 135 |
+
if list_of_points:
|
| 136 |
+
return concat_points_with_nan(list_of_points)
|
| 137 |
+
else:
|
| 138 |
+
return None
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def concat_points_with_nan(list_of_points: list[cpy.PointArray]) -> cpy.PointArray:
|
| 142 |
+
"""Concatenate a list of points into a single point array with NaNs used to separate each line.
|
| 143 |
+
"""
|
| 144 |
+
if not list_of_points:
|
| 145 |
+
raise ValueError("Empty list passed to concat_points_with_nan")
|
| 146 |
+
|
| 147 |
+
if len(list_of_points) == 1:
|
| 148 |
+
return list_of_points[0]
|
| 149 |
+
else:
|
| 150 |
+
nan_spacer = np.full((1, 2), np.nan, dtype=point_dtype)
|
| 151 |
+
list_of_points = [list_of_points[0],
|
| 152 |
+
*list(chain(*((nan_spacer, x) for x in list_of_points[1:])))]
|
| 153 |
+
return concat_points(list_of_points)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def insert_nan_at_offsets(points: cpy.PointArray, offsets: cpy.OffsetArray) -> cpy.PointArray:
|
| 157 |
+
"""Insert NaNs into a point array at locations specified by an offset array.
|
| 158 |
+
"""
|
| 159 |
+
check_point_array(points)
|
| 160 |
+
check_offset_array(offsets)
|
| 161 |
+
|
| 162 |
+
if len(offsets) <= 2:
|
| 163 |
+
return points
|
| 164 |
+
else:
|
| 165 |
+
nan_spacer = np.array([np.nan, np.nan], dtype=point_dtype)
|
| 166 |
+
# Convert offsets to int64 to avoid numpy error when mixing signed and unsigned ints.
|
| 167 |
+
return np.insert(points, offsets[1:-1].astype(np.int64), nan_spacer, axis=0)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def offsets_from_codes(codes: cpy.CodeArray) -> cpy.OffsetArray:
|
| 171 |
+
"""Determine offsets from codes using locations of MOVETO codes.
|
| 172 |
+
"""
|
| 173 |
+
check_code_array(codes)
|
| 174 |
+
|
| 175 |
+
return np.append(np.nonzero(codes == MOVETO)[0], len(codes)).astype(offset_dtype)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def offsets_from_lengths(list_of_points: list[cpy.PointArray]) -> cpy.OffsetArray:
|
| 179 |
+
"""Determine offsets from lengths of point arrays.
|
| 180 |
+
"""
|
| 181 |
+
if not list_of_points:
|
| 182 |
+
raise ValueError("Empty list passed to offsets_from_lengths")
|
| 183 |
+
|
| 184 |
+
return np.cumsum([0] + [len(line) for line in list_of_points], dtype=offset_dtype)
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
def outer_offsets_from_list_of_codes(list_of_codes: list[cpy.CodeArray]) -> cpy.OffsetArray:
|
| 188 |
+
"""Determine outer offsets from codes using locations of MOVETO codes.
|
| 189 |
+
"""
|
| 190 |
+
if not list_of_codes:
|
| 191 |
+
raise ValueError("Empty list passed to outer_offsets_from_list_of_codes")
|
| 192 |
+
|
| 193 |
+
return np.cumsum([0] + [np.count_nonzero(codes == MOVETO) for codes in list_of_codes],
|
| 194 |
+
dtype=offset_dtype)
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def outer_offsets_from_list_of_offsets(list_of_offsets: list[cpy.OffsetArray]) -> cpy.OffsetArray:
|
| 198 |
+
"""Determine outer offsets from a list of offsets.
|
| 199 |
+
"""
|
| 200 |
+
if not list_of_offsets:
|
| 201 |
+
raise ValueError("Empty list passed to outer_offsets_from_list_of_offsets")
|
| 202 |
+
|
| 203 |
+
return np.cumsum([0] + [len(offsets)-1 for offsets in list_of_offsets], dtype=offset_dtype)
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def remove_nan(points: cpy.PointArray) -> tuple[cpy.PointArray, cpy.OffsetArray]:
|
| 207 |
+
"""Remove NaN from a points array, also return the offsets corresponding to the NaN removed.
|
| 208 |
+
"""
|
| 209 |
+
check_point_array(points)
|
| 210 |
+
|
| 211 |
+
nan_offsets = np.nonzero(np.isnan(points[:, 0]))[0]
|
| 212 |
+
if len(nan_offsets) == 0:
|
| 213 |
+
return points, np.array([0, len(points)], dtype=offset_dtype)
|
| 214 |
+
else:
|
| 215 |
+
points = np.delete(points, nan_offsets, axis=0)
|
| 216 |
+
nan_offsets -= np.arange(len(nan_offsets))
|
| 217 |
+
offsets: cpy.OffsetArray = np.empty(len(nan_offsets)+2, dtype=offset_dtype)
|
| 218 |
+
offsets[0] = 0
|
| 219 |
+
offsets[1:-1] = nan_offsets
|
| 220 |
+
offsets[-1] = len(points)
|
| 221 |
+
return points, offsets
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def split_codes_by_offsets(codes: cpy.CodeArray, offsets: cpy.OffsetArray) -> list[cpy.CodeArray]:
|
| 225 |
+
"""Split a code array at locations specified by an offset array into a list of code arrays.
|
| 226 |
+
"""
|
| 227 |
+
check_code_array(codes)
|
| 228 |
+
check_offset_array(offsets)
|
| 229 |
+
|
| 230 |
+
if len(offsets) > 2:
|
| 231 |
+
return np.split(codes, offsets[1:-1])
|
| 232 |
+
else:
|
| 233 |
+
return [codes]
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def split_points_by_offsets(
|
| 237 |
+
points: cpy.PointArray,
|
| 238 |
+
offsets: cpy.OffsetArray,
|
| 239 |
+
) -> list[cpy.PointArray]:
|
| 240 |
+
"""Split a point array at locations specified by an offset array into a list of point arrays.
|
| 241 |
+
"""
|
| 242 |
+
check_point_array(points)
|
| 243 |
+
check_offset_array(offsets)
|
| 244 |
+
|
| 245 |
+
if len(offsets) > 2:
|
| 246 |
+
return np.split(points, offsets[1:-1])
|
| 247 |
+
else:
|
| 248 |
+
return [points]
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def split_points_at_nan(points: cpy.PointArray) -> list[cpy.PointArray]:
|
| 252 |
+
"""Split a points array at NaNs into a list of point arrays.
|
| 253 |
+
"""
|
| 254 |
+
check_point_array(points)
|
| 255 |
+
|
| 256 |
+
nan_offsets = np.nonzero(np.isnan(points[:, 0]))[0]
|
| 257 |
+
if len(nan_offsets) == 0:
|
| 258 |
+
return [points]
|
| 259 |
+
else:
|
| 260 |
+
nan_offsets = np.concatenate(([-1], nan_offsets, [len(points)]))
|
| 261 |
+
return [points[s+1:e] for s, e in zip(nan_offsets[:-1], nan_offsets[1:])]
|
videollama2/lib/python3.10/site-packages/contourpy/convert.py
ADDED
|
@@ -0,0 +1,620 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING, cast
|
| 4 |
+
|
| 5 |
+
import numpy as np
|
| 6 |
+
|
| 7 |
+
from contourpy._contourpy import FillType, LineType
|
| 8 |
+
import contourpy.array as arr
|
| 9 |
+
from contourpy.enum_util import as_fill_type, as_line_type
|
| 10 |
+
from contourpy.typecheck import check_filled, check_lines
|
| 11 |
+
from contourpy.types import MOVETO, offset_dtype
|
| 12 |
+
|
| 13 |
+
if TYPE_CHECKING:
|
| 14 |
+
import contourpy._contourpy as cpy
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def _convert_filled_from_OuterCode(
|
| 18 |
+
filled: cpy.FillReturn_OuterCode,
|
| 19 |
+
fill_type_to: FillType,
|
| 20 |
+
) -> cpy.FillReturn:
|
| 21 |
+
if fill_type_to == FillType.OuterCode:
|
| 22 |
+
return filled
|
| 23 |
+
elif fill_type_to == FillType.OuterOffset:
|
| 24 |
+
return (filled[0], [arr.offsets_from_codes(codes) for codes in filled[1]])
|
| 25 |
+
|
| 26 |
+
if len(filled[0]) > 0:
|
| 27 |
+
points = arr.concat_points(filled[0])
|
| 28 |
+
codes = arr.concat_codes(filled[1])
|
| 29 |
+
else:
|
| 30 |
+
points = None
|
| 31 |
+
codes = None
|
| 32 |
+
|
| 33 |
+
if fill_type_to == FillType.ChunkCombinedCode:
|
| 34 |
+
return ([points], [codes])
|
| 35 |
+
elif fill_type_to == FillType.ChunkCombinedOffset:
|
| 36 |
+
return ([points], [None if codes is None else arr.offsets_from_codes(codes)])
|
| 37 |
+
elif fill_type_to == FillType.ChunkCombinedCodeOffset:
|
| 38 |
+
outer_offsets = None if points is None else arr.offsets_from_lengths(filled[0])
|
| 39 |
+
ret1: cpy.FillReturn_ChunkCombinedCodeOffset = ([points], [codes], [outer_offsets])
|
| 40 |
+
return ret1
|
| 41 |
+
elif fill_type_to == FillType.ChunkCombinedOffsetOffset:
|
| 42 |
+
if codes is None:
|
| 43 |
+
ret2: cpy.FillReturn_ChunkCombinedOffsetOffset = ([None], [None], [None])
|
| 44 |
+
else:
|
| 45 |
+
offsets = arr.offsets_from_codes(codes)
|
| 46 |
+
outer_offsets = arr.outer_offsets_from_list_of_codes(filled[1])
|
| 47 |
+
ret2 = ([points], [offsets], [outer_offsets])
|
| 48 |
+
return ret2
|
| 49 |
+
else:
|
| 50 |
+
raise ValueError(f"Invalid FillType {fill_type_to}")
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _convert_filled_from_OuterOffset(
|
| 54 |
+
filled: cpy.FillReturn_OuterOffset,
|
| 55 |
+
fill_type_to: FillType,
|
| 56 |
+
) -> cpy.FillReturn:
|
| 57 |
+
if fill_type_to == FillType.OuterCode:
|
| 58 |
+
separate_codes = [arr.codes_from_offsets(offsets) for offsets in filled[1]]
|
| 59 |
+
return (filled[0], separate_codes)
|
| 60 |
+
elif fill_type_to == FillType.OuterOffset:
|
| 61 |
+
return filled
|
| 62 |
+
|
| 63 |
+
if len(filled[0]) > 0:
|
| 64 |
+
points = arr.concat_points(filled[0])
|
| 65 |
+
offsets = arr.concat_offsets(filled[1])
|
| 66 |
+
else:
|
| 67 |
+
points = None
|
| 68 |
+
offsets = None
|
| 69 |
+
|
| 70 |
+
if fill_type_to == FillType.ChunkCombinedCode:
|
| 71 |
+
return ([points], [None if offsets is None else arr.codes_from_offsets(offsets)])
|
| 72 |
+
elif fill_type_to == FillType.ChunkCombinedOffset:
|
| 73 |
+
return ([points], [offsets])
|
| 74 |
+
elif fill_type_to == FillType.ChunkCombinedCodeOffset:
|
| 75 |
+
if offsets is None:
|
| 76 |
+
ret1: cpy.FillReturn_ChunkCombinedCodeOffset = ([None], [None], [None])
|
| 77 |
+
else:
|
| 78 |
+
codes = arr.codes_from_offsets(offsets)
|
| 79 |
+
outer_offsets = arr.offsets_from_lengths(filled[0])
|
| 80 |
+
ret1 = ([points], [codes], [outer_offsets])
|
| 81 |
+
return ret1
|
| 82 |
+
elif fill_type_to == FillType.ChunkCombinedOffsetOffset:
|
| 83 |
+
if points is None:
|
| 84 |
+
ret2: cpy.FillReturn_ChunkCombinedOffsetOffset = ([None], [None], [None])
|
| 85 |
+
else:
|
| 86 |
+
outer_offsets = arr.outer_offsets_from_list_of_offsets(filled[1])
|
| 87 |
+
ret2 = ([points], [offsets], [outer_offsets])
|
| 88 |
+
return ret2
|
| 89 |
+
else:
|
| 90 |
+
raise ValueError(f"Invalid FillType {fill_type_to}")
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _convert_filled_from_ChunkCombinedCode(
|
| 94 |
+
filled: cpy.FillReturn_ChunkCombinedCode,
|
| 95 |
+
fill_type_to: FillType,
|
| 96 |
+
) -> cpy.FillReturn:
|
| 97 |
+
if fill_type_to == FillType.ChunkCombinedCode:
|
| 98 |
+
return filled
|
| 99 |
+
elif fill_type_to == FillType.ChunkCombinedOffset:
|
| 100 |
+
codes = [None if codes is None else arr.offsets_from_codes(codes) for codes in filled[1]]
|
| 101 |
+
return (filled[0], codes)
|
| 102 |
+
else:
|
| 103 |
+
raise ValueError(
|
| 104 |
+
f"Conversion from {FillType.ChunkCombinedCode} to {fill_type_to} not supported")
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def _convert_filled_from_ChunkCombinedOffset(
|
| 108 |
+
filled: cpy.FillReturn_ChunkCombinedOffset,
|
| 109 |
+
fill_type_to: FillType,
|
| 110 |
+
) -> cpy.FillReturn:
|
| 111 |
+
if fill_type_to == FillType.ChunkCombinedCode:
|
| 112 |
+
chunk_codes: list[cpy.CodeArray | None] = []
|
| 113 |
+
for points, offsets in zip(*filled):
|
| 114 |
+
if points is None:
|
| 115 |
+
chunk_codes.append(None)
|
| 116 |
+
else:
|
| 117 |
+
if TYPE_CHECKING:
|
| 118 |
+
assert offsets is not None
|
| 119 |
+
chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points))
|
| 120 |
+
return (filled[0], chunk_codes)
|
| 121 |
+
elif fill_type_to == FillType.ChunkCombinedOffset:
|
| 122 |
+
return filled
|
| 123 |
+
else:
|
| 124 |
+
raise ValueError(
|
| 125 |
+
f"Conversion from {FillType.ChunkCombinedOffset} to {fill_type_to} not supported")
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _convert_filled_from_ChunkCombinedCodeOffset(
|
| 129 |
+
filled: cpy.FillReturn_ChunkCombinedCodeOffset,
|
| 130 |
+
fill_type_to: FillType,
|
| 131 |
+
) -> cpy.FillReturn:
|
| 132 |
+
if fill_type_to == FillType.OuterCode:
|
| 133 |
+
separate_points = []
|
| 134 |
+
separate_codes = []
|
| 135 |
+
for points, codes, outer_offsets in zip(*filled):
|
| 136 |
+
if points is not None:
|
| 137 |
+
if TYPE_CHECKING:
|
| 138 |
+
assert codes is not None
|
| 139 |
+
assert outer_offsets is not None
|
| 140 |
+
separate_points += arr.split_points_by_offsets(points, outer_offsets)
|
| 141 |
+
separate_codes += arr.split_codes_by_offsets(codes, outer_offsets)
|
| 142 |
+
return (separate_points, separate_codes)
|
| 143 |
+
elif fill_type_to == FillType.OuterOffset:
|
| 144 |
+
separate_points = []
|
| 145 |
+
separate_offsets = []
|
| 146 |
+
for points, codes, outer_offsets in zip(*filled):
|
| 147 |
+
if points is not None:
|
| 148 |
+
if TYPE_CHECKING:
|
| 149 |
+
assert codes is not None
|
| 150 |
+
assert outer_offsets is not None
|
| 151 |
+
separate_points += arr.split_points_by_offsets(points, outer_offsets)
|
| 152 |
+
separate_codes = arr.split_codes_by_offsets(codes, outer_offsets)
|
| 153 |
+
separate_offsets += [arr.offsets_from_codes(codes) for codes in separate_codes]
|
| 154 |
+
return (separate_points, separate_offsets)
|
| 155 |
+
elif fill_type_to == FillType.ChunkCombinedCode:
|
| 156 |
+
ret1: cpy.FillReturn_ChunkCombinedCode = (filled[0], filled[1])
|
| 157 |
+
return ret1
|
| 158 |
+
elif fill_type_to == FillType.ChunkCombinedOffset:
|
| 159 |
+
all_offsets = [None if codes is None else arr.offsets_from_codes(codes)
|
| 160 |
+
for codes in filled[1]]
|
| 161 |
+
ret2: cpy.FillReturn_ChunkCombinedOffset = (filled[0], all_offsets)
|
| 162 |
+
return ret2
|
| 163 |
+
elif fill_type_to == FillType.ChunkCombinedCodeOffset:
|
| 164 |
+
return filled
|
| 165 |
+
elif fill_type_to == FillType.ChunkCombinedOffsetOffset:
|
| 166 |
+
chunk_offsets: list[cpy.OffsetArray | None] = []
|
| 167 |
+
chunk_outer_offsets: list[cpy.OffsetArray | None] = []
|
| 168 |
+
for codes, outer_offsets in zip(*filled[1:]):
|
| 169 |
+
if codes is None:
|
| 170 |
+
chunk_offsets.append(None)
|
| 171 |
+
chunk_outer_offsets.append(None)
|
| 172 |
+
else:
|
| 173 |
+
if TYPE_CHECKING:
|
| 174 |
+
assert outer_offsets is not None
|
| 175 |
+
offsets = arr.offsets_from_codes(codes)
|
| 176 |
+
outer_offsets = np.array([np.nonzero(offsets == oo)[0][0] for oo in outer_offsets],
|
| 177 |
+
dtype=offset_dtype)
|
| 178 |
+
chunk_offsets.append(offsets)
|
| 179 |
+
chunk_outer_offsets.append(outer_offsets)
|
| 180 |
+
ret3: cpy.FillReturn_ChunkCombinedOffsetOffset = (
|
| 181 |
+
filled[0], chunk_offsets, chunk_outer_offsets,
|
| 182 |
+
)
|
| 183 |
+
return ret3
|
| 184 |
+
else:
|
| 185 |
+
raise ValueError(f"Invalid FillType {fill_type_to}")
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def _convert_filled_from_ChunkCombinedOffsetOffset(
|
| 189 |
+
filled: cpy.FillReturn_ChunkCombinedOffsetOffset,
|
| 190 |
+
fill_type_to: FillType,
|
| 191 |
+
) -> cpy.FillReturn:
|
| 192 |
+
if fill_type_to == FillType.OuterCode:
|
| 193 |
+
separate_points = []
|
| 194 |
+
separate_codes = []
|
| 195 |
+
for points, offsets, outer_offsets in zip(*filled):
|
| 196 |
+
if points is not None:
|
| 197 |
+
if TYPE_CHECKING:
|
| 198 |
+
assert offsets is not None
|
| 199 |
+
assert outer_offsets is not None
|
| 200 |
+
codes = arr.codes_from_offsets_and_points(offsets, points)
|
| 201 |
+
outer_offsets = offsets[outer_offsets]
|
| 202 |
+
separate_points += arr.split_points_by_offsets(points, outer_offsets)
|
| 203 |
+
separate_codes += arr.split_codes_by_offsets(codes, outer_offsets)
|
| 204 |
+
return (separate_points, separate_codes)
|
| 205 |
+
elif fill_type_to == FillType.OuterOffset:
|
| 206 |
+
separate_points = []
|
| 207 |
+
separate_offsets = []
|
| 208 |
+
for points, offsets, outer_offsets in zip(*filled):
|
| 209 |
+
if points is not None:
|
| 210 |
+
if TYPE_CHECKING:
|
| 211 |
+
assert offsets is not None
|
| 212 |
+
assert outer_offsets is not None
|
| 213 |
+
if len(outer_offsets) > 2:
|
| 214 |
+
separate_offsets += [offsets[s:e+1] - offsets[s] for s, e in
|
| 215 |
+
zip(outer_offsets[:-1], outer_offsets[1:])]
|
| 216 |
+
else:
|
| 217 |
+
separate_offsets.append(offsets)
|
| 218 |
+
separate_points += arr.split_points_by_offsets(points, offsets[outer_offsets])
|
| 219 |
+
return (separate_points, separate_offsets)
|
| 220 |
+
elif fill_type_to == FillType.ChunkCombinedCode:
|
| 221 |
+
chunk_codes: list[cpy.CodeArray | None] = []
|
| 222 |
+
for points, offsets, outer_offsets in zip(*filled):
|
| 223 |
+
if points is None:
|
| 224 |
+
chunk_codes.append(None)
|
| 225 |
+
else:
|
| 226 |
+
if TYPE_CHECKING:
|
| 227 |
+
assert offsets is not None
|
| 228 |
+
assert outer_offsets is not None
|
| 229 |
+
chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points))
|
| 230 |
+
ret1: cpy.FillReturn_ChunkCombinedCode = (filled[0], chunk_codes)
|
| 231 |
+
return ret1
|
| 232 |
+
elif fill_type_to == FillType.ChunkCombinedOffset:
|
| 233 |
+
return (filled[0], filled[1])
|
| 234 |
+
elif fill_type_to == FillType.ChunkCombinedCodeOffset:
|
| 235 |
+
chunk_codes = []
|
| 236 |
+
chunk_outer_offsets: list[cpy.OffsetArray | None] = []
|
| 237 |
+
for points, offsets, outer_offsets in zip(*filled):
|
| 238 |
+
if points is None:
|
| 239 |
+
chunk_codes.append(None)
|
| 240 |
+
chunk_outer_offsets.append(None)
|
| 241 |
+
else:
|
| 242 |
+
if TYPE_CHECKING:
|
| 243 |
+
assert offsets is not None
|
| 244 |
+
assert outer_offsets is not None
|
| 245 |
+
chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points))
|
| 246 |
+
chunk_outer_offsets.append(offsets[outer_offsets])
|
| 247 |
+
ret2: cpy.FillReturn_ChunkCombinedCodeOffset = (filled[0], chunk_codes, chunk_outer_offsets)
|
| 248 |
+
return ret2
|
| 249 |
+
elif fill_type_to == FillType.ChunkCombinedOffsetOffset:
|
| 250 |
+
return filled
|
| 251 |
+
else:
|
| 252 |
+
raise ValueError(f"Invalid FillType {fill_type_to}")
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
def convert_filled(
|
| 256 |
+
filled: cpy.FillReturn,
|
| 257 |
+
fill_type_from: FillType | str,
|
| 258 |
+
fill_type_to: FillType | str,
|
| 259 |
+
) -> cpy.FillReturn:
|
| 260 |
+
"""Convert filled contours from one :class:`~.FillType` to another.
|
| 261 |
+
|
| 262 |
+
Args:
|
| 263 |
+
filled (sequence of arrays): Filled contour polygons to convert, such as those returned by
|
| 264 |
+
:meth:`.ContourGenerator.filled`.
|
| 265 |
+
fill_type_from (FillType or str): :class:`~.FillType` to convert from as enum or
|
| 266 |
+
string equivalent.
|
| 267 |
+
fill_type_to (FillType or str): :class:`~.FillType` to convert to as enum or string
|
| 268 |
+
equivalent.
|
| 269 |
+
|
| 270 |
+
Return:
|
| 271 |
+
Converted filled contour polygons.
|
| 272 |
+
|
| 273 |
+
When converting non-chunked fill types (``FillType.OuterCode`` or ``FillType.OuterOffset``) to
|
| 274 |
+
chunked ones, all polygons are placed in the first chunk. When converting in the other
|
| 275 |
+
direction, all chunk information is discarded. Converting a fill type that is not aware of the
|
| 276 |
+
relationship between outer boundaries and contained holes (``FillType.ChunkCombinedCode`` or
|
| 277 |
+
``FillType.ChunkCombinedOffset``) to one that is will raise a ``ValueError``.
|
| 278 |
+
|
| 279 |
+
.. versionadded:: 1.2.0
|
| 280 |
+
"""
|
| 281 |
+
fill_type_from = as_fill_type(fill_type_from)
|
| 282 |
+
fill_type_to = as_fill_type(fill_type_to)
|
| 283 |
+
|
| 284 |
+
check_filled(filled, fill_type_from)
|
| 285 |
+
|
| 286 |
+
if fill_type_from == FillType.OuterCode:
|
| 287 |
+
if TYPE_CHECKING:
|
| 288 |
+
filled = cast(cpy.FillReturn_OuterCode, filled)
|
| 289 |
+
return _convert_filled_from_OuterCode(filled, fill_type_to)
|
| 290 |
+
elif fill_type_from == FillType.OuterOffset:
|
| 291 |
+
if TYPE_CHECKING:
|
| 292 |
+
filled = cast(cpy.FillReturn_OuterOffset, filled)
|
| 293 |
+
return _convert_filled_from_OuterOffset(filled, fill_type_to)
|
| 294 |
+
elif fill_type_from == FillType.ChunkCombinedCode:
|
| 295 |
+
if TYPE_CHECKING:
|
| 296 |
+
filled = cast(cpy.FillReturn_ChunkCombinedCode, filled)
|
| 297 |
+
return _convert_filled_from_ChunkCombinedCode(filled, fill_type_to)
|
| 298 |
+
elif fill_type_from == FillType.ChunkCombinedOffset:
|
| 299 |
+
if TYPE_CHECKING:
|
| 300 |
+
filled = cast(cpy.FillReturn_ChunkCombinedOffset, filled)
|
| 301 |
+
return _convert_filled_from_ChunkCombinedOffset(filled, fill_type_to)
|
| 302 |
+
elif fill_type_from == FillType.ChunkCombinedCodeOffset:
|
| 303 |
+
if TYPE_CHECKING:
|
| 304 |
+
filled = cast(cpy.FillReturn_ChunkCombinedCodeOffset, filled)
|
| 305 |
+
return _convert_filled_from_ChunkCombinedCodeOffset(filled, fill_type_to)
|
| 306 |
+
elif fill_type_from == FillType.ChunkCombinedOffsetOffset:
|
| 307 |
+
if TYPE_CHECKING:
|
| 308 |
+
filled = cast(cpy.FillReturn_ChunkCombinedOffsetOffset, filled)
|
| 309 |
+
return _convert_filled_from_ChunkCombinedOffsetOffset(filled, fill_type_to)
|
| 310 |
+
else:
|
| 311 |
+
raise ValueError(f"Invalid FillType {fill_type_from}")
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def _convert_lines_from_Separate(
|
| 315 |
+
lines: cpy.LineReturn_Separate,
|
| 316 |
+
line_type_to: LineType,
|
| 317 |
+
) -> cpy.LineReturn:
|
| 318 |
+
if line_type_to == LineType.Separate:
|
| 319 |
+
return lines
|
| 320 |
+
elif line_type_to == LineType.SeparateCode:
|
| 321 |
+
separate_codes = [arr.codes_from_points(line) for line in lines]
|
| 322 |
+
return (lines, separate_codes)
|
| 323 |
+
elif line_type_to == LineType.ChunkCombinedCode:
|
| 324 |
+
if not lines:
|
| 325 |
+
ret1: cpy.LineReturn_ChunkCombinedCode = ([None], [None])
|
| 326 |
+
else:
|
| 327 |
+
points = arr.concat_points(lines)
|
| 328 |
+
offsets = arr.offsets_from_lengths(lines)
|
| 329 |
+
codes = arr.codes_from_offsets_and_points(offsets, points)
|
| 330 |
+
ret1 = ([points], [codes])
|
| 331 |
+
return ret1
|
| 332 |
+
elif line_type_to == LineType.ChunkCombinedOffset:
|
| 333 |
+
if not lines:
|
| 334 |
+
ret2: cpy.LineReturn_ChunkCombinedOffset = ([None], [None])
|
| 335 |
+
else:
|
| 336 |
+
ret2 = ([arr.concat_points(lines)], [arr.offsets_from_lengths(lines)])
|
| 337 |
+
return ret2
|
| 338 |
+
elif line_type_to == LineType.ChunkCombinedNan:
|
| 339 |
+
if not lines:
|
| 340 |
+
ret3: cpy.LineReturn_ChunkCombinedNan = ([None],)
|
| 341 |
+
else:
|
| 342 |
+
ret3 = ([arr.concat_points_with_nan(lines)],)
|
| 343 |
+
return ret3
|
| 344 |
+
else:
|
| 345 |
+
raise ValueError(f"Invalid LineType {line_type_to}")
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def _convert_lines_from_SeparateCode(
|
| 349 |
+
lines: cpy.LineReturn_SeparateCode,
|
| 350 |
+
line_type_to: LineType,
|
| 351 |
+
) -> cpy.LineReturn:
|
| 352 |
+
if line_type_to == LineType.Separate:
|
| 353 |
+
# Drop codes.
|
| 354 |
+
return lines[0]
|
| 355 |
+
elif line_type_to == LineType.SeparateCode:
|
| 356 |
+
return lines
|
| 357 |
+
elif line_type_to == LineType.ChunkCombinedCode:
|
| 358 |
+
if not lines[0]:
|
| 359 |
+
ret1: cpy.LineReturn_ChunkCombinedCode = ([None], [None])
|
| 360 |
+
else:
|
| 361 |
+
ret1 = ([arr.concat_points(lines[0])], [arr.concat_codes(lines[1])])
|
| 362 |
+
return ret1
|
| 363 |
+
elif line_type_to == LineType.ChunkCombinedOffset:
|
| 364 |
+
if not lines[0]:
|
| 365 |
+
ret2: cpy.LineReturn_ChunkCombinedOffset = ([None], [None])
|
| 366 |
+
else:
|
| 367 |
+
ret2 = ([arr.concat_points(lines[0])], [arr.offsets_from_lengths(lines[0])])
|
| 368 |
+
return ret2
|
| 369 |
+
elif line_type_to == LineType.ChunkCombinedNan:
|
| 370 |
+
if not lines[0]:
|
| 371 |
+
ret3: cpy.LineReturn_ChunkCombinedNan = ([None],)
|
| 372 |
+
else:
|
| 373 |
+
ret3 = ([arr.concat_points_with_nan(lines[0])],)
|
| 374 |
+
return ret3
|
| 375 |
+
else:
|
| 376 |
+
raise ValueError(f"Invalid LineType {line_type_to}")
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
def _convert_lines_from_ChunkCombinedCode(
|
| 380 |
+
lines: cpy.LineReturn_ChunkCombinedCode,
|
| 381 |
+
line_type_to: LineType,
|
| 382 |
+
) -> cpy.LineReturn:
|
| 383 |
+
if line_type_to in (LineType.Separate, LineType.SeparateCode):
|
| 384 |
+
separate_lines = []
|
| 385 |
+
for points, codes in zip(*lines):
|
| 386 |
+
if points is not None:
|
| 387 |
+
if TYPE_CHECKING:
|
| 388 |
+
assert codes is not None
|
| 389 |
+
split_at = np.nonzero(codes == MOVETO)[0]
|
| 390 |
+
if len(split_at) > 1:
|
| 391 |
+
separate_lines += np.split(points, split_at[1:])
|
| 392 |
+
else:
|
| 393 |
+
separate_lines.append(points)
|
| 394 |
+
if line_type_to == LineType.Separate:
|
| 395 |
+
return separate_lines
|
| 396 |
+
else:
|
| 397 |
+
separate_codes = [arr.codes_from_points(line) for line in separate_lines]
|
| 398 |
+
return (separate_lines, separate_codes)
|
| 399 |
+
elif line_type_to == LineType.ChunkCombinedCode:
|
| 400 |
+
return lines
|
| 401 |
+
elif line_type_to == LineType.ChunkCombinedOffset:
|
| 402 |
+
chunk_offsets = [None if codes is None else arr.offsets_from_codes(codes)
|
| 403 |
+
for codes in lines[1]]
|
| 404 |
+
return (lines[0], chunk_offsets)
|
| 405 |
+
elif line_type_to == LineType.ChunkCombinedNan:
|
| 406 |
+
points_nan: list[cpy.PointArray | None] = []
|
| 407 |
+
for points, codes in zip(*lines):
|
| 408 |
+
if points is None:
|
| 409 |
+
points_nan.append(None)
|
| 410 |
+
else:
|
| 411 |
+
if TYPE_CHECKING:
|
| 412 |
+
assert codes is not None
|
| 413 |
+
offsets = arr.offsets_from_codes(codes)
|
| 414 |
+
points_nan.append(arr.insert_nan_at_offsets(points, offsets))
|
| 415 |
+
return (points_nan,)
|
| 416 |
+
else:
|
| 417 |
+
raise ValueError(f"Invalid LineType {line_type_to}")
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def _convert_lines_from_ChunkCombinedOffset(
|
| 421 |
+
lines: cpy.LineReturn_ChunkCombinedOffset,
|
| 422 |
+
line_type_to: LineType,
|
| 423 |
+
) -> cpy.LineReturn:
|
| 424 |
+
if line_type_to in (LineType.Separate, LineType.SeparateCode):
|
| 425 |
+
separate_lines = []
|
| 426 |
+
for points, offsets in zip(*lines):
|
| 427 |
+
if points is not None:
|
| 428 |
+
if TYPE_CHECKING:
|
| 429 |
+
assert offsets is not None
|
| 430 |
+
separate_lines += arr.split_points_by_offsets(points, offsets)
|
| 431 |
+
if line_type_to == LineType.Separate:
|
| 432 |
+
return separate_lines
|
| 433 |
+
else:
|
| 434 |
+
separate_codes = [arr.codes_from_points(line) for line in separate_lines]
|
| 435 |
+
return (separate_lines, separate_codes)
|
| 436 |
+
elif line_type_to == LineType.ChunkCombinedCode:
|
| 437 |
+
chunk_codes: list[cpy.CodeArray | None] = []
|
| 438 |
+
for points, offsets in zip(*lines):
|
| 439 |
+
if points is None:
|
| 440 |
+
chunk_codes.append(None)
|
| 441 |
+
else:
|
| 442 |
+
if TYPE_CHECKING:
|
| 443 |
+
assert offsets is not None
|
| 444 |
+
chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points))
|
| 445 |
+
return (lines[0], chunk_codes)
|
| 446 |
+
elif line_type_to == LineType.ChunkCombinedOffset:
|
| 447 |
+
return lines
|
| 448 |
+
elif line_type_to == LineType.ChunkCombinedNan:
|
| 449 |
+
points_nan: list[cpy.PointArray | None] = []
|
| 450 |
+
for points, offsets in zip(*lines):
|
| 451 |
+
if points is None:
|
| 452 |
+
points_nan.append(None)
|
| 453 |
+
else:
|
| 454 |
+
if TYPE_CHECKING:
|
| 455 |
+
assert offsets is not None
|
| 456 |
+
points_nan.append(arr.insert_nan_at_offsets(points, offsets))
|
| 457 |
+
return (points_nan,)
|
| 458 |
+
else:
|
| 459 |
+
raise ValueError(f"Invalid LineType {line_type_to}")
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
def _convert_lines_from_ChunkCombinedNan(
|
| 463 |
+
lines: cpy.LineReturn_ChunkCombinedNan,
|
| 464 |
+
line_type_to: LineType,
|
| 465 |
+
) -> cpy.LineReturn:
|
| 466 |
+
if line_type_to in (LineType.Separate, LineType.SeparateCode):
|
| 467 |
+
separate_lines = []
|
| 468 |
+
for points in lines[0]:
|
| 469 |
+
if points is not None:
|
| 470 |
+
separate_lines += arr.split_points_at_nan(points)
|
| 471 |
+
if line_type_to == LineType.Separate:
|
| 472 |
+
return separate_lines
|
| 473 |
+
else:
|
| 474 |
+
separate_codes = [arr.codes_from_points(points) for points in separate_lines]
|
| 475 |
+
return (separate_lines, separate_codes)
|
| 476 |
+
elif line_type_to == LineType.ChunkCombinedCode:
|
| 477 |
+
chunk_points: list[cpy.PointArray | None] = []
|
| 478 |
+
chunk_codes: list[cpy.CodeArray | None] = []
|
| 479 |
+
for points in lines[0]:
|
| 480 |
+
if points is None:
|
| 481 |
+
chunk_points.append(None)
|
| 482 |
+
chunk_codes.append(None)
|
| 483 |
+
else:
|
| 484 |
+
points, offsets = arr.remove_nan(points)
|
| 485 |
+
chunk_points.append(points)
|
| 486 |
+
chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points))
|
| 487 |
+
return (chunk_points, chunk_codes)
|
| 488 |
+
elif line_type_to == LineType.ChunkCombinedOffset:
|
| 489 |
+
chunk_points = []
|
| 490 |
+
chunk_offsets: list[cpy.OffsetArray | None] = []
|
| 491 |
+
for points in lines[0]:
|
| 492 |
+
if points is None:
|
| 493 |
+
chunk_points.append(None)
|
| 494 |
+
chunk_offsets.append(None)
|
| 495 |
+
else:
|
| 496 |
+
points, offsets = arr.remove_nan(points)
|
| 497 |
+
chunk_points.append(points)
|
| 498 |
+
chunk_offsets.append(offsets)
|
| 499 |
+
return (chunk_points, chunk_offsets)
|
| 500 |
+
elif line_type_to == LineType.ChunkCombinedNan:
|
| 501 |
+
return lines
|
| 502 |
+
else:
|
| 503 |
+
raise ValueError(f"Invalid LineType {line_type_to}")
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
def convert_lines(
|
| 507 |
+
lines: cpy.LineReturn,
|
| 508 |
+
line_type_from: LineType | str,
|
| 509 |
+
line_type_to: LineType | str,
|
| 510 |
+
) -> cpy.LineReturn:
|
| 511 |
+
"""Convert contour lines from one :class:`~.LineType` to another.
|
| 512 |
+
|
| 513 |
+
Args:
|
| 514 |
+
lines (sequence of arrays): Contour lines to convert, such as those returned by
|
| 515 |
+
:meth:`.ContourGenerator.lines`.
|
| 516 |
+
line_type_from (LineType or str): :class:`~.LineType` to convert from as enum or
|
| 517 |
+
string equivalent.
|
| 518 |
+
line_type_to (LineType or str): :class:`~.LineType` to convert to as enum or string
|
| 519 |
+
equivalent.
|
| 520 |
+
|
| 521 |
+
Return:
|
| 522 |
+
Converted contour lines.
|
| 523 |
+
|
| 524 |
+
When converting non-chunked line types (``LineType.Separate`` or ``LineType.SeparateCode``) to
|
| 525 |
+
chunked ones (``LineType.ChunkCombinedCode``, ``LineType.ChunkCombinedOffset`` or
|
| 526 |
+
``LineType.ChunkCombinedNan``), all lines are placed in the first chunk. When converting in the
|
| 527 |
+
other direction, all chunk information is discarded.
|
| 528 |
+
|
| 529 |
+
.. versionadded:: 1.2.0
|
| 530 |
+
"""
|
| 531 |
+
line_type_from = as_line_type(line_type_from)
|
| 532 |
+
line_type_to = as_line_type(line_type_to)
|
| 533 |
+
|
| 534 |
+
check_lines(lines, line_type_from)
|
| 535 |
+
|
| 536 |
+
if line_type_from == LineType.Separate:
|
| 537 |
+
if TYPE_CHECKING:
|
| 538 |
+
lines = cast(cpy.LineReturn_Separate, lines)
|
| 539 |
+
return _convert_lines_from_Separate(lines, line_type_to)
|
| 540 |
+
elif line_type_from == LineType.SeparateCode:
|
| 541 |
+
if TYPE_CHECKING:
|
| 542 |
+
lines = cast(cpy.LineReturn_SeparateCode, lines)
|
| 543 |
+
return _convert_lines_from_SeparateCode(lines, line_type_to)
|
| 544 |
+
elif line_type_from == LineType.ChunkCombinedCode:
|
| 545 |
+
if TYPE_CHECKING:
|
| 546 |
+
lines = cast(cpy.LineReturn_ChunkCombinedCode, lines)
|
| 547 |
+
return _convert_lines_from_ChunkCombinedCode(lines, line_type_to)
|
| 548 |
+
elif line_type_from == LineType.ChunkCombinedOffset:
|
| 549 |
+
if TYPE_CHECKING:
|
| 550 |
+
lines = cast(cpy.LineReturn_ChunkCombinedOffset, lines)
|
| 551 |
+
return _convert_lines_from_ChunkCombinedOffset(lines, line_type_to)
|
| 552 |
+
elif line_type_from == LineType.ChunkCombinedNan:
|
| 553 |
+
if TYPE_CHECKING:
|
| 554 |
+
lines = cast(cpy.LineReturn_ChunkCombinedNan, lines)
|
| 555 |
+
return _convert_lines_from_ChunkCombinedNan(lines, line_type_to)
|
| 556 |
+
else:
|
| 557 |
+
raise ValueError(f"Invalid LineType {line_type_from}")
|
| 558 |
+
|
| 559 |
+
|
| 560 |
+
def convert_multi_filled(
|
| 561 |
+
multi_filled: list[cpy.FillReturn],
|
| 562 |
+
fill_type_from: FillType | str,
|
| 563 |
+
fill_type_to: FillType | str,
|
| 564 |
+
) -> list[cpy.FillReturn]:
|
| 565 |
+
"""Convert multiple sets of filled contours from one :class:`~.FillType` to another.
|
| 566 |
+
|
| 567 |
+
Args:
|
| 568 |
+
multi_filled (nested sequence of arrays): Filled contour polygons to convert, such as those
|
| 569 |
+
returned by :meth:`.ContourGenerator.multi_filled`.
|
| 570 |
+
fill_type_from (FillType or str): :class:`~.FillType` to convert from as enum or
|
| 571 |
+
string equivalent.
|
| 572 |
+
fill_type_to (FillType or str): :class:`~.FillType` to convert to as enum or string
|
| 573 |
+
equivalent.
|
| 574 |
+
|
| 575 |
+
Return:
|
| 576 |
+
Converted sets filled contour polygons.
|
| 577 |
+
|
| 578 |
+
When converting non-chunked fill types (``FillType.OuterCode`` or ``FillType.OuterOffset``) to
|
| 579 |
+
chunked ones, all polygons are placed in the first chunk. When converting in the other
|
| 580 |
+
direction, all chunk information is discarded. Converting a fill type that is not aware of the
|
| 581 |
+
relationship between outer boundaries and contained holes (``FillType.ChunkCombinedCode`` or
|
| 582 |
+
``FillType.ChunkCombinedOffset``) to one that is will raise a ``ValueError``.
|
| 583 |
+
|
| 584 |
+
.. versionadded:: 1.3.0
|
| 585 |
+
"""
|
| 586 |
+
fill_type_from = as_fill_type(fill_type_from)
|
| 587 |
+
fill_type_to = as_fill_type(fill_type_to)
|
| 588 |
+
|
| 589 |
+
return [convert_filled(filled, fill_type_from, fill_type_to) for filled in multi_filled]
|
| 590 |
+
|
| 591 |
+
|
| 592 |
+
def convert_multi_lines(
|
| 593 |
+
multi_lines: list[cpy.LineReturn],
|
| 594 |
+
line_type_from: LineType | str,
|
| 595 |
+
line_type_to: LineType | str,
|
| 596 |
+
) -> list[cpy.LineReturn]:
|
| 597 |
+
"""Convert multiple sets of contour lines from one :class:`~.LineType` to another.
|
| 598 |
+
|
| 599 |
+
Args:
|
| 600 |
+
multi_lines (nested sequence of arrays): Contour lines to convert, such as those returned by
|
| 601 |
+
:meth:`.ContourGenerator.multi_lines`.
|
| 602 |
+
line_type_from (LineType or str): :class:`~.LineType` to convert from as enum or
|
| 603 |
+
string equivalent.
|
| 604 |
+
line_type_to (LineType or str): :class:`~.LineType` to convert to as enum or string
|
| 605 |
+
equivalent.
|
| 606 |
+
|
| 607 |
+
Return:
|
| 608 |
+
Converted set of contour lines.
|
| 609 |
+
|
| 610 |
+
When converting non-chunked line types (``LineType.Separate`` or ``LineType.SeparateCode``) to
|
| 611 |
+
chunked ones (``LineType.ChunkCombinedCode``, ``LineType.ChunkCombinedOffset`` or
|
| 612 |
+
``LineType.ChunkCombinedNan``), all lines are placed in the first chunk. When converting in the
|
| 613 |
+
other direction, all chunk information is discarded.
|
| 614 |
+
|
| 615 |
+
.. versionadded:: 1.3.0
|
| 616 |
+
"""
|
| 617 |
+
line_type_from = as_line_type(line_type_from)
|
| 618 |
+
line_type_to = as_line_type(line_type_to)
|
| 619 |
+
|
| 620 |
+
return [convert_lines(lines, line_type_from, line_type_to) for lines in multi_lines]
|
videollama2/lib/python3.10/site-packages/contourpy/dechunk.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING, cast
|
| 4 |
+
|
| 5 |
+
from contourpy._contourpy import FillType, LineType
|
| 6 |
+
from contourpy.array import (
|
| 7 |
+
concat_codes_or_none,
|
| 8 |
+
concat_offsets_or_none,
|
| 9 |
+
concat_points_or_none,
|
| 10 |
+
concat_points_or_none_with_nan,
|
| 11 |
+
)
|
| 12 |
+
from contourpy.enum_util import as_fill_type, as_line_type
|
| 13 |
+
from contourpy.typecheck import check_filled, check_lines
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
import contourpy._contourpy as cpy
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def dechunk_filled(filled: cpy.FillReturn, fill_type: FillType | str) -> cpy.FillReturn:
    """Return the specified filled contours with chunked data moved into the first chunk.

    Filled contours that are not chunked (``FillType.OuterCode`` and ``FillType.OuterOffset``) and
    those that are but only contain a single chunk are returned unmodified. Individual polygons are
    unchanged, they are not geometrically combined.

    Args:
        filled (sequence of arrays): Filled contour data, such as returned by
            :meth:`.ContourGenerator.filled`.
        fill_type (FillType or str): Type of :meth:`~.ContourGenerator.filled` as enum or string
            equivalent.

    Return:
        Filled contours in a single chunk.

    .. versionadded:: 1.2.0
    """
    fill_type = as_fill_type(fill_type)

    if fill_type in (FillType.OuterCode, FillType.OuterOffset):
        # These fill types are not chunked, so there is nothing to combine.
        return filled

    check_filled(filled, fill_type)
    if len(filled[0]) < 2:
        # Already a single chunk, return unmodified.
        return filled

    if TYPE_CHECKING:
        filled = cast(cpy.FillReturn_Chunk, filled)
    all_points = concat_points_or_none(filled[0])

    if fill_type == FillType.ChunkCombinedCode:
        if TYPE_CHECKING:
            filled = cast(cpy.FillReturn_ChunkCombinedCode, filled)
        if all_points is None:
            combined_code: cpy.FillReturn_ChunkCombinedCode = ([None], [None])
        else:
            combined_code = ([all_points], [concat_codes_or_none(filled[1])])
        return combined_code
    elif fill_type == FillType.ChunkCombinedOffset:
        if TYPE_CHECKING:
            filled = cast(cpy.FillReturn_ChunkCombinedOffset, filled)
        if all_points is None:
            combined_offset: cpy.FillReturn_ChunkCombinedOffset = ([None], [None])
        else:
            combined_offset = ([all_points], [concat_offsets_or_none(filled[1])])
        return combined_offset
    elif fill_type == FillType.ChunkCombinedCodeOffset:
        if TYPE_CHECKING:
            filled = cast(cpy.FillReturn_ChunkCombinedCodeOffset, filled)
        if all_points is None:
            combined_code_offset: cpy.FillReturn_ChunkCombinedCodeOffset = ([None], [None], [None])
        else:
            combined_code_offset = (
                [all_points],
                [concat_codes_or_none(filled[1])],
                [concat_offsets_or_none(filled[2])],
            )
        return combined_code_offset
    elif fill_type == FillType.ChunkCombinedOffsetOffset:
        if TYPE_CHECKING:
            filled = cast(cpy.FillReturn_ChunkCombinedOffsetOffset, filled)
        if all_points is None:
            combined_offset_offset: cpy.FillReturn_ChunkCombinedOffsetOffset = (
                [None], [None], [None])
        else:
            combined_offset_offset = (
                [all_points],
                [concat_offsets_or_none(filled[1])],
                [concat_offsets_or_none(filled[2])],
            )
        return combined_offset_offset
    else:
        raise ValueError(f"Invalid FillType {fill_type}")
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def dechunk_lines(lines: cpy.LineReturn, line_type: LineType | str) -> cpy.LineReturn:
    """Return the specified contour lines with chunked data moved into the first chunk.

    Contour lines that are not chunked (``LineType.Separate`` and ``LineType.SeparateCode``) and
    those that are but only contain a single chunk are returned unmodified. Individual lines are
    unchanged, they are not geometrically combined.

    Args:
        lines (sequence of arrays): Contour line data, such as returned by
            :meth:`.ContourGenerator.lines`.
        line_type (LineType or str): Type of :meth:`~.ContourGenerator.lines` as enum or string
            equivalent.

    Return:
        Contour lines in a single chunk.

    .. versionadded:: 1.2.0
    """
    line_type = as_line_type(line_type)

    if line_type in (LineType.Separate, LineType.SeparateCode):
        # These line types are not chunked, so there is nothing to combine.
        return lines

    check_lines(lines, line_type)
    if len(lines[0]) < 2:
        # Already a single chunk, return unmodified.
        return lines

    if TYPE_CHECKING:
        lines = cast(cpy.LineReturn_Chunk, lines)

    if line_type == LineType.ChunkCombinedCode:
        if TYPE_CHECKING:
            lines = cast(cpy.LineReturn_ChunkCombinedCode, lines)
        all_points = concat_points_or_none(lines[0])
        if all_points is None:
            combined_code: cpy.LineReturn_ChunkCombinedCode = ([None], [None])
        else:
            combined_code = ([all_points], [concat_codes_or_none(lines[1])])
        return combined_code
    elif line_type == LineType.ChunkCombinedOffset:
        if TYPE_CHECKING:
            lines = cast(cpy.LineReturn_ChunkCombinedOffset, lines)
        all_points = concat_points_or_none(lines[0])
        if all_points is None:
            combined_offset: cpy.LineReturn_ChunkCombinedOffset = ([None], [None])
        else:
            combined_offset = ([all_points], [concat_offsets_or_none(lines[1])])
        return combined_offset
    elif line_type == LineType.ChunkCombinedNan:
        if TYPE_CHECKING:
            lines = cast(cpy.LineReturn_ChunkCombinedNan, lines)
        # Chunks are joined into one array with a NaN point inserted between them.
        combined_nan: cpy.LineReturn_ChunkCombinedNan = (
            [concat_points_or_none_with_nan(lines[0])],)
        return combined_nan
    else:
        raise ValueError(f"Invalid LineType {line_type}")
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def dechunk_multi_filled(
    multi_filled: list[cpy.FillReturn],
    fill_type: FillType | str,
) -> list[cpy.FillReturn]:
    """Return multiple sets of filled contours with chunked data moved into the first chunks.

    Filled contours that are not chunked (``FillType.OuterCode`` and ``FillType.OuterOffset``) and
    those that are but only contain a single chunk are returned unmodified. Individual polygons are
    unchanged, they are not geometrically combined.

    Args:
        multi_filled (nested sequence of arrays): Filled contour data, such as returned by
            :meth:`.ContourGenerator.multi_filled`.
        fill_type (FillType or str): Type of :meth:`~.ContourGenerator.filled` as enum or string
            equivalent.

    Return:
        Multiple sets of filled contours in a single chunk.

    .. versionadded:: 1.3.0
    """
    fill_type = as_fill_type(fill_type)

    if fill_type in (FillType.OuterCode, FillType.OuterOffset):
        # These fill types are not chunked; skip the per-set dechunking entirely.
        return multi_filled

    # Dechunk each set of filled contours independently.
    return [dechunk_filled(single_filled, fill_type) for single_filled in multi_filled]
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def dechunk_multi_lines(
    multi_lines: list[cpy.LineReturn],
    line_type: LineType | str,
) -> list[cpy.LineReturn]:
    """Return multiple sets of contour lines with all chunked data moved into the first chunks.

    Contour lines that are not chunked (``LineType.Separate`` and ``LineType.SeparateCode``) and
    those that are but only contain a single chunk are returned unmodified. Individual lines are
    unchanged, they are not geometrically combined.

    Args:
        multi_lines (nested sequence of arrays): Contour line data, such as returned by
            :meth:`.ContourGenerator.multi_lines`.
        line_type (LineType or str): Type of :meth:`~.ContourGenerator.lines` as enum or string
            equivalent.

    Return:
        Multiple sets of contour lines in a single chunk.

    .. versionadded:: 1.3.0
    """
    line_type = as_line_type(line_type)

    if line_type in (LineType.Separate, LineType.SeparateCode):
        # These line types are not chunked; skip the per-set dechunking entirely.
        return multi_lines

    # Dechunk each set of contour lines independently.
    return [dechunk_lines(single_lines, line_type) for single_lines in multi_lines]
|
videollama2/lib/python3.10/site-packages/contourpy/py.typed
ADDED
|
File without changes
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (304 Bytes). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/_build_config.cpython-310.pyc
ADDED
|
Binary file (1.78 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/bokeh_renderer.cpython-310.pyc
ADDED
|
Binary file (13.7 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/bokeh_util.cpython-310.pyc
ADDED
|
Binary file (2.21 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/data.cpython-310.pyc
ADDED
|
Binary file (2.71 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/mpl_renderer.cpython-310.pyc
ADDED
|
Binary file (18.6 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/mpl_util.cpython-310.pyc
ADDED
|
Binary file (3.08 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/__pycache__/renderer.cpython-310.pyc
ADDED
|
Binary file (6 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/contourpy/util/bokeh_renderer.py
ADDED
|
@@ -0,0 +1,335 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import io
|
| 4 |
+
from typing import TYPE_CHECKING, Any
|
| 5 |
+
|
| 6 |
+
from bokeh.io import export_png, export_svg, show
|
| 7 |
+
from bokeh.io.export import get_screenshot_as_png
|
| 8 |
+
from bokeh.layouts import gridplot
|
| 9 |
+
from bokeh.models.annotations.labels import Label
|
| 10 |
+
from bokeh.palettes import Category10
|
| 11 |
+
from bokeh.plotting import figure
|
| 12 |
+
import numpy as np
|
| 13 |
+
|
| 14 |
+
from contourpy.enum_util import as_fill_type, as_line_type
|
| 15 |
+
from contourpy.util.bokeh_util import filled_to_bokeh, lines_to_bokeh
|
| 16 |
+
from contourpy.util.renderer import Renderer
|
| 17 |
+
|
| 18 |
+
if TYPE_CHECKING:
|
| 19 |
+
from bokeh.models import GridPlot
|
| 20 |
+
from bokeh.palettes import Palette
|
| 21 |
+
from numpy.typing import ArrayLike
|
| 22 |
+
from selenium.webdriver.remote.webdriver import WebDriver
|
| 23 |
+
|
| 24 |
+
from contourpy import FillType, LineType
|
| 25 |
+
from contourpy._contourpy import FillReturn, LineReturn
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class BokehRenderer(Renderer):
    """Utility renderer using Bokeh to render a grid of plots over the same (x, y) range.

    Args:
        nrows (int, optional): Number of rows of plots, default ``1``.
        ncols (int, optional): Number of columns of plots, default ``1``.
        figsize (tuple(float, float), optional): Figure size in inches (assuming 100 dpi), default
            ``(9, 9)``.
        show_frame (bool, optional): Whether to show frame and axes ticks, default ``True``.
        want_svg (bool, optional): Whether output is required in SVG format or not, default
            ``False``.

    Warning:
        :class:`~.BokehRenderer`, unlike :class:`~.MplRenderer`, needs to be told in advance if
        output to SVG format will be required later, otherwise it will assume PNG output.
    """
    # One Bokeh figure per grid cell, in row-major order.
    _figures: list[figure]
    # Bokeh grid layout containing all figures; used for show/save/export.
    _layout: GridPlot
    # Color palette used to resolve "C0".."C9" style color names.
    _palette: Palette
    # Whether figures were created with the "svg" output backend.
    _want_svg: bool

    def __init__(
        self,
        nrows: int = 1,
        ncols: int = 1,
        figsize: tuple[float, float] = (9, 9),
        show_frame: bool = True,
        want_svg: bool = False,
    ) -> None:
        self._want_svg = want_svg
        self._palette = Category10[10]

        total_size = 100*np.asarray(figsize, dtype=int)  # Assuming 100 dpi.

        nfigures = nrows*ncols
        self._figures = []
        # SVG export requires figures created with the "svg" backend up front.
        backend = "svg" if self._want_svg else "canvas"
        for _ in range(nfigures):
            fig = figure(output_backend=backend)
            fig.xgrid.visible = False
            fig.ygrid.visible = False
            self._figures.append(fig)
            if not show_frame:
                fig.outline_line_color = None  # type: ignore[assignment]
                fig.axis.visible = False

        # Per-figure size is the total size divided evenly among columns/rows.
        self._layout = gridplot(
            self._figures, ncols=ncols, toolbar_location=None,  # type: ignore[arg-type]
            width=total_size[0] // ncols, height=total_size[1] // nrows)

    def _convert_color(self, color: str) -> str:
        """Resolve matplotlib-style ``"CN"`` color names to the stored palette."""
        if isinstance(color, str) and color[0] == "C":
            index = int(color[1:])
            color = self._palette[index]
        return color

    def _get_figure(self, ax: figure | int) -> figure:
        """Return the Bokeh figure for *ax*, which may be an index or a figure itself."""
        if isinstance(ax, int):
            ax = self._figures[ax]
        return ax

    def filled(
        self,
        filled: FillReturn,
        fill_type: FillType | str,
        ax: figure | int = 0,
        color: str = "C0",
        alpha: float = 0.7,
    ) -> None:
        """Plot filled contours on a single plot.

        Args:
            filled (sequence of arrays): Filled contour data as returned by
                :meth:`~.ContourGenerator.filled`.
            fill_type (FillType or str): Type of :meth:`~.ContourGenerator.filled` data as returned
                by :attr:`~.ContourGenerator.fill_type`, or a string equivalent.
            ax (int or Bokeh Figure, optional): Which plot to use, default ``0``.
            color (str, optional): Color to plot with. May be a string color or the letter ``"C"``
                followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the
                ``Category10`` palette. Default ``"C0"``.
            alpha (float, optional): Opacity to plot with, default ``0.7``.
        """
        fill_type = as_fill_type(fill_type)
        fig = self._get_figure(ax)
        color = self._convert_color(color)
        # Convert to the nested x/y coordinate lists Bokeh's multi_polygons expects.
        xs, ys = filled_to_bokeh(filled, fill_type)
        if len(xs) > 0:
            fig.multi_polygons(xs=[xs], ys=[ys], color=color, fill_alpha=alpha, line_width=0)

    def grid(
        self,
        x: ArrayLike,
        y: ArrayLike,
        ax: figure | int = 0,
        color: str = "black",
        alpha: float = 0.1,
        point_color: str | None = None,
        quad_as_tri_alpha: float = 0,
    ) -> None:
        """Plot quad grid lines on a single plot.

        Args:
            x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points.
            y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points.
            ax (int or Bokeh Figure, optional): Which plot to use, default ``0``.
            color (str, optional): Color to plot grid lines, default ``"black"``.
            alpha (float, optional): Opacity to plot lines with, default ``0.1``.
            point_color (str, optional): Color to plot grid points or ``None`` if grid points
                should not be plotted, default ``None``.
            quad_as_tri_alpha (float, optional): Opacity to plot ``quad_as_tri`` grid, default
                ``0``.

        Colors may be a string color or the letter ``"C"`` followed by an integer in the range
        ``"C0"`` to ``"C9"`` to use a color from the ``Category10`` palette.

        Warning:
            ``quad_as_tri_alpha > 0`` plots all quads as though they are unmasked.
        """
        fig = self._get_figure(ax)
        x, y = self._grid_as_2d(x, y)
        # Rows of the grid plus columns (transpose) give all horizontal and vertical lines.
        xs = list(x) + list(x.T)
        ys = list(y) + list(y.T)
        kwargs = {"line_color": color, "alpha": alpha}
        fig.multi_line(xs, ys, **kwargs)
        if quad_as_tri_alpha > 0:
            # Assumes no quad mask.
            # Quad centres, used as the shared vertex of the four triangles per quad.
            xmid = (0.25*(x[:-1, :-1] + x[1:, :-1] + x[:-1, 1:] + x[1:, 1:])).ravel()
            ymid = (0.25*(y[:-1, :-1] + y[1:, :-1] + y[:-1, 1:] + y[1:, 1:])).ravel()
            # Diagonals from lower-left corner through centre to upper-right corner.
            fig.multi_line(
                list(np.stack((x[:-1, :-1].ravel(), xmid, x[1:, 1:].ravel()), axis=1)),
                list(np.stack((y[:-1, :-1].ravel(), ymid, y[1:, 1:].ravel()), axis=1)),
                **kwargs)
            # Diagonals from upper-left corner through centre to lower-right corner.
            fig.multi_line(
                list(np.stack((x[:-1, 1:].ravel(), xmid, x[1:, :-1].ravel()), axis=1)),
                list(np.stack((y[:-1, 1:].ravel(), ymid, y[1:, :-1].ravel()), axis=1)),
                **kwargs)
        if point_color is not None:
            fig.circle(
                x=x.ravel(), y=y.ravel(), fill_color=color, line_color=None, alpha=alpha, size=8)

    def lines(
        self,
        lines: LineReturn,
        line_type: LineType | str,
        ax: figure | int = 0,
        color: str = "C0",
        alpha: float = 1.0,
        linewidth: float = 1,
    ) -> None:
        """Plot contour lines on a single plot.

        Args:
            lines (sequence of arrays): Contour line data as returned by
                :meth:`~.ContourGenerator.lines`.
            line_type (LineType or str): Type of :meth:`~.ContourGenerator.lines` data as returned
                by :attr:`~.ContourGenerator.line_type`, or a string equivalent.
            ax (int or Bokeh Figure, optional): Which plot to use, default ``0``.
            color (str, optional): Color to plot lines. May be a string color or the letter ``"C"``
                followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the
                ``Category10`` palette. Default ``"C0"``.
            alpha (float, optional): Opacity to plot lines with, default ``1.0``.
            linewidth (float, optional): Width of lines, default ``1``.

        Note:
            Assumes all lines are open line strips not closed line loops.
        """
        line_type = as_line_type(line_type)
        fig = self._get_figure(ax)
        color = self._convert_color(color)
        # lines_to_bokeh returns None coordinates when there is nothing to draw.
        xs, ys = lines_to_bokeh(lines, line_type)
        if xs is not None:
            fig.line(xs, ys, line_color=color, line_alpha=alpha, line_width=linewidth)

    def mask(
        self,
        x: ArrayLike,
        y: ArrayLike,
        z: ArrayLike | np.ma.MaskedArray[Any, Any],
        ax: figure | int = 0,
        color: str = "black",
    ) -> None:
        """Plot masked out grid points as circles on a single plot.

        Args:
            x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points.
            y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points.
            z (masked array of shape (ny, nx): z-values.
            ax (int or Bokeh Figure, optional): Which plot to use, default ``0``.
            color (str, optional): Circle color, default ``"black"``.
        """
        mask = np.ma.getmask(z)  # type: ignore[no-untyped-call]
        if mask is np.ma.nomask:
            # Nothing is masked, so there is nothing to plot.
            return
        fig = self._get_figure(ax)
        color = self._convert_color(color)
        x, y = self._grid_as_2d(x, y)
        fig.circle(x[mask], y[mask], fill_color=color, size=10)

    def save(
        self,
        filename: str,
        transparent: bool = False,
        *,
        webdriver: WebDriver | None = None,
    ) -> None:
        """Save plots to SVG or PNG file.

        Args:
            filename (str): Filename to save to.
            transparent (bool, optional): Whether background should be transparent, default
                ``False``.
            webdriver (WebDriver, optional): Selenium WebDriver instance to use to create the image.

                .. versionadded:: 1.1.1

        Warning:
            To output to SVG file, ``want_svg=True`` must have been passed to the constructor.
        """
        if transparent:
            # Clearing the fill colors makes both the plot and border areas transparent.
            for fig in self._figures:
                fig.background_fill_color = None  # type: ignore[assignment]
                fig.border_fill_color = None  # type: ignore[assignment]

        if self._want_svg:
            export_svg(self._layout, filename=filename, webdriver=webdriver)
        else:
            export_png(self._layout, filename=filename, webdriver=webdriver)

    def save_to_buffer(self, *, webdriver: WebDriver | None = None) -> io.BytesIO:
        """Save plots to an ``io.BytesIO`` buffer.

        Args:
            webdriver (WebDriver, optional): Selenium WebDriver instance to use to create the image.

                .. versionadded:: 1.1.1

        Return:
            BytesIO: PNG image buffer.
        """
        # Render via headless browser screenshot, then serialize the PIL image as PNG.
        image = get_screenshot_as_png(self._layout, driver=webdriver)
        buffer = io.BytesIO()
        image.save(buffer, "png")
        return buffer

    def show(self) -> None:
        """Show plots in web browser, in usual Bokeh manner.
        """
        show(self._layout)

    def title(self, title: str, ax: figure | int = 0, color: str | None = None) -> None:
        """Set the title of a single plot.

        Args:
            title (str): Title text.
            ax (int or Bokeh Figure, optional): Which plot to set the title of, default ``0``.
            color (str, optional): Color to set title. May be a string color or the letter ``"C"``
                followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the
                ``Category10`` palette. Default ``None`` which is ``black``.
        """
        fig = self._get_figure(ax)
        fig.title = title  # type: ignore[assignment]
        fig.title.align = "center"  # type: ignore[attr-defined]
        if color is not None:
            fig.title.text_color = self._convert_color(color)  # type: ignore[attr-defined]

    def z_values(
        self,
        x: ArrayLike,
        y: ArrayLike,
        z: ArrayLike,
        ax: figure | int = 0,
        color: str = "green",
        fmt: str = ".1f",
        quad_as_tri: bool = False,
    ) -> None:
        """Show ``z`` values on a single plot.

        Args:
            x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points.
            y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points.
            z (array-like of shape (ny, nx): z-values.
            ax (int or Bokeh Figure, optional): Which plot to use, default ``0``.
            color (str, optional): Color of added text. May be a string color or the letter ``"C"``
                followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the
                ``Category10`` palette. Default ``"green"``.
            fmt (str, optional): Format to display z-values, default ``".1f"``.
            quad_as_tri (bool, optional): Whether to show z-values at the ``quad_as_tri`` centres
                of quads.

        Warning:
            ``quad_as_tri=True`` shows z-values for all quads, even if masked.
        """
        fig = self._get_figure(ax)
        color = self._convert_color(color)
        x, y = self._grid_as_2d(x, y)
        z = np.asarray(z)
        ny, nx = z.shape
        kwargs = {"text_color": color, "text_align": "center", "text_baseline": "middle"}
        # One label per grid point showing its z-value.
        for j in range(ny):
            for i in range(nx):
                fig.add_layout(Label(x=x[j, i], y=y[j, i], text=f"{z[j, i]:{fmt}}", **kwargs))
        if quad_as_tri:
            # Additionally label each quad centre with the mean of its four corner values.
            for j in range(ny-1):
                for i in range(nx-1):
                    xx = np.mean(x[j:j+2, i:i+2])
                    yy = np.mean(y[j:j+2, i:i+2])
                    zz = np.mean(z[j:j+2, i:i+2])
                    fig.add_layout(Label(x=xx, y=yy, text=f"{zz:{fmt}}", **kwargs))
|