Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- parrot/share/terminfo/q/qume +0 -0
- parrot/share/terminfo/q/qume5 +0 -0
- parrot/share/terminfo/q/qvt119p-25-w +0 -0
- parrot/share/terminfo/q/qvt203-25-w +0 -0
- parrot/share/terminfo/t/tek4025ex +0 -0
- parrot/share/terminfo/t/teken-vt+fkeys +0 -0
- parrot/share/terminfo/t/terminet300 +0 -0
- parrot/share/terminfo/t/ti735 +0 -0
- parrot/share/terminfo/t/tty43 +0 -0
- parrot/share/terminfo/t/tvi912c-unk-vb +0 -0
- parrot/share/terminfo/t/tvi920c-mc +0 -0
- parrot/share/terminfo/t/tvi920c-mc-vb +0 -0
- parrot/share/terminfo/t/tvi920c-unk-2p +0 -0
- parrot/share/terminfo/t/tvi920c-vb +0 -0
- parrot/share/terminfo/w/wyse120-25 +0 -0
- parrot/share/terminfo/x/xnuppc+112x37 +0 -0
- parrot/share/terminfo/x/xnuppc+144x48 +0 -0
- parrot/share/terminfo/x/xnuppc+160x64 +0 -0
- parrot/share/terminfo/x/xnuppc+200x75 +0 -0
- parrot/share/terminfo/x/xnuppc-100x37 +0 -0
- parrot/share/terminfo/x/xnuppc-100x37-m +0 -0
- parrot/share/terminfo/x/xnuppc-128x40 +0 -0
- parrot/share/terminfo/x/xnuppc-160x64-m +0 -0
- parrot/share/terminfo/x/xnuppc-200x64 +0 -0
- parrot/share/terminfo/x/xnuppc-256x96 +0 -0
- parrot/share/terminfo/x/xnuppc-b +0 -0
- parrot/share/terminfo/x/xterm+x11hilite +0 -0
- parrot/share/terminfo/x/xterm-24 +0 -0
- parrot/share/terminfo/x/xterm-mono +0 -0
- parrot/share/terminfo/x/xterm-nic +0 -0
- parrot/share/terminfo/x/xterm-r5 +0 -0
- parrot/share/terminfo/x/xterm-r6 +0 -0
- videollama2/lib/python3.10/site-packages/pandas/core/internals/api.py +156 -0
- videollama2/lib/python3.10/site-packages/pandas/core/internals/array_manager.py +1340 -0
- videollama2/lib/python3.10/site-packages/pandas/core/internals/base.py +407 -0
- videollama2/lib/python3.10/site-packages/pandas/core/internals/blocks.py +0 -0
- videollama2/lib/python3.10/site-packages/pandas/core/internals/managers.py +2375 -0
- videollama2/lib/python3.10/site-packages/pandas/core/internals/ops.py +154 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/ES2_compatibility.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/arrays_of_arrays.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/clear_buffer_object.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/clear_texture.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/compressed_texture_pixel_storage.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/compute_shader.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/draw_instanced.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/fragment_layer_viewport.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/fragment_program_shadow.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/geometry_shader4.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/gpu_shader_fp64.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/multi_bind.cpython-310.pyc +0 -0
parrot/share/terminfo/q/qume
ADDED
|
Binary file (364 Bytes). View file
|
|
|
parrot/share/terminfo/q/qume5
ADDED
|
Binary file (364 Bytes). View file
|
|
|
parrot/share/terminfo/q/qvt119p-25-w
ADDED
|
Binary file (595 Bytes). View file
|
|
|
parrot/share/terminfo/q/qvt203-25-w
ADDED
|
Binary file (855 Bytes). View file
|
|
|
parrot/share/terminfo/t/tek4025ex
ADDED
|
Binary file (681 Bytes). View file
|
|
|
parrot/share/terminfo/t/teken-vt+fkeys
ADDED
|
Binary file (582 Bytes). View file
|
|
|
parrot/share/terminfo/t/terminet300
ADDED
|
Binary file (384 Bytes). View file
|
|
|
parrot/share/terminfo/t/ti735
ADDED
|
Binary file (412 Bytes). View file
|
|
|
parrot/share/terminfo/t/tty43
ADDED
|
Binary file (348 Bytes). View file
|
|
|
parrot/share/terminfo/t/tvi912c-unk-vb
ADDED
|
Binary file (1.32 kB). View file
|
|
|
parrot/share/terminfo/t/tvi920c-mc
ADDED
|
Binary file (1.4 kB). View file
|
|
|
parrot/share/terminfo/t/tvi920c-mc-vb
ADDED
|
Binary file (1.53 kB). View file
|
|
|
parrot/share/terminfo/t/tvi920c-unk-2p
ADDED
|
Binary file (1.39 kB). View file
|
|
|
parrot/share/terminfo/t/tvi920c-vb
ADDED
|
Binary file (1.42 kB). View file
|
|
|
parrot/share/terminfo/w/wyse120-25
ADDED
|
Binary file (1.27 kB). View file
|
|
|
parrot/share/terminfo/x/xnuppc+112x37
ADDED
|
Binary file (88 Bytes). View file
|
|
|
parrot/share/terminfo/x/xnuppc+144x48
ADDED
|
Binary file (88 Bytes). View file
|
|
|
parrot/share/terminfo/x/xnuppc+160x64
ADDED
|
Binary file (90 Bytes). View file
|
|
|
parrot/share/terminfo/x/xnuppc+200x75
ADDED
|
Binary file (90 Bytes). View file
|
|
|
parrot/share/terminfo/x/xnuppc-100x37
ADDED
|
Binary file (1.22 kB). View file
|
|
|
parrot/share/terminfo/x/xnuppc-100x37-m
ADDED
|
Binary file (987 Bytes). View file
|
|
|
parrot/share/terminfo/x/xnuppc-128x40
ADDED
|
Binary file (1.22 kB). View file
|
|
|
parrot/share/terminfo/x/xnuppc-160x64-m
ADDED
|
Binary file (987 Bytes). View file
|
|
|
parrot/share/terminfo/x/xnuppc-200x64
ADDED
|
Binary file (1.22 kB). View file
|
|
|
parrot/share/terminfo/x/xnuppc-256x96
ADDED
|
Binary file (1.22 kB). View file
|
|
|
parrot/share/terminfo/x/xnuppc-b
ADDED
|
Binary file (1.22 kB). View file
|
|
|
parrot/share/terminfo/x/xterm+x11hilite
ADDED
|
Binary file (903 Bytes). View file
|
|
|
parrot/share/terminfo/x/xterm-24
ADDED
|
Binary file (1.53 kB). View file
|
|
|
parrot/share/terminfo/x/xterm-mono
ADDED
|
Binary file (1.49 kB). View file
|
|
|
parrot/share/terminfo/x/xterm-nic
ADDED
|
Binary file (3.83 kB). View file
|
|
|
parrot/share/terminfo/x/xterm-r5
ADDED
|
Binary file (1.3 kB). View file
|
|
|
parrot/share/terminfo/x/xterm-r6
ADDED
|
Binary file (1.49 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/pandas/core/internals/api.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This is a pseudo-public API for downstream libraries. We ask that downstream
|
| 3 |
+
authors
|
| 4 |
+
|
| 5 |
+
1) Try to avoid using internals directly altogether, and failing that,
|
| 6 |
+
2) Use only functions exposed here (or in core.internals)
|
| 7 |
+
|
| 8 |
+
"""
|
| 9 |
+
from __future__ import annotations
|
| 10 |
+
|
| 11 |
+
from typing import TYPE_CHECKING
|
| 12 |
+
|
| 13 |
+
import numpy as np
|
| 14 |
+
|
| 15 |
+
from pandas._libs.internals import BlockPlacement
|
| 16 |
+
|
| 17 |
+
from pandas.core.dtypes.common import pandas_dtype
|
| 18 |
+
from pandas.core.dtypes.dtypes import (
|
| 19 |
+
DatetimeTZDtype,
|
| 20 |
+
PeriodDtype,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from pandas.core.arrays import DatetimeArray
|
| 24 |
+
from pandas.core.construction import extract_array
|
| 25 |
+
from pandas.core.internals.blocks import (
|
| 26 |
+
check_ndim,
|
| 27 |
+
ensure_block_shape,
|
| 28 |
+
extract_pandas_array,
|
| 29 |
+
get_block_type,
|
| 30 |
+
maybe_coerce_values,
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
if TYPE_CHECKING:
|
| 34 |
+
from pandas._typing import Dtype
|
| 35 |
+
|
| 36 |
+
from pandas.core.internals.blocks import Block
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def make_block(
|
| 40 |
+
values, placement, klass=None, ndim=None, dtype: Dtype | None = None
|
| 41 |
+
) -> Block:
|
| 42 |
+
"""
|
| 43 |
+
This is a pseudo-public analogue to blocks.new_block.
|
| 44 |
+
|
| 45 |
+
We ask that downstream libraries use this rather than any fully-internal
|
| 46 |
+
APIs, including but not limited to:
|
| 47 |
+
|
| 48 |
+
- core.internals.blocks.make_block
|
| 49 |
+
- Block.make_block
|
| 50 |
+
- Block.make_block_same_class
|
| 51 |
+
- Block.__init__
|
| 52 |
+
"""
|
| 53 |
+
if dtype is not None:
|
| 54 |
+
dtype = pandas_dtype(dtype)
|
| 55 |
+
|
| 56 |
+
values, dtype = extract_pandas_array(values, dtype, ndim)
|
| 57 |
+
|
| 58 |
+
from pandas.core.internals.blocks import (
|
| 59 |
+
DatetimeTZBlock,
|
| 60 |
+
ExtensionBlock,
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
if klass is ExtensionBlock and isinstance(values.dtype, PeriodDtype):
|
| 64 |
+
# GH-44681 changed PeriodArray to be stored in the 2D
|
| 65 |
+
# NDArrayBackedExtensionBlock instead of ExtensionBlock
|
| 66 |
+
# -> still allow ExtensionBlock to be passed in this case for back compat
|
| 67 |
+
klass = None
|
| 68 |
+
|
| 69 |
+
if klass is None:
|
| 70 |
+
dtype = dtype or values.dtype
|
| 71 |
+
klass = get_block_type(dtype)
|
| 72 |
+
|
| 73 |
+
elif klass is DatetimeTZBlock and not isinstance(values.dtype, DatetimeTZDtype):
|
| 74 |
+
# pyarrow calls get here
|
| 75 |
+
values = DatetimeArray._simple_new(
|
| 76 |
+
# error: Argument "dtype" to "_simple_new" of "DatetimeArray" has
|
| 77 |
+
# incompatible type "Union[ExtensionDtype, dtype[Any], None]";
|
| 78 |
+
# expected "Union[dtype[datetime64], DatetimeTZDtype]"
|
| 79 |
+
values,
|
| 80 |
+
dtype=dtype, # type: ignore[arg-type]
|
| 81 |
+
)
|
| 82 |
+
|
| 83 |
+
if not isinstance(placement, BlockPlacement):
|
| 84 |
+
placement = BlockPlacement(placement)
|
| 85 |
+
|
| 86 |
+
ndim = maybe_infer_ndim(values, placement, ndim)
|
| 87 |
+
if isinstance(values.dtype, (PeriodDtype, DatetimeTZDtype)):
|
| 88 |
+
# GH#41168 ensure we can pass 1D dt64tz values
|
| 89 |
+
# More generally, any EA dtype that isn't is_1d_only_ea_dtype
|
| 90 |
+
values = extract_array(values, extract_numpy=True)
|
| 91 |
+
values = ensure_block_shape(values, ndim)
|
| 92 |
+
|
| 93 |
+
check_ndim(values, placement, ndim)
|
| 94 |
+
values = maybe_coerce_values(values)
|
| 95 |
+
return klass(values, ndim=ndim, placement=placement)
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def maybe_infer_ndim(values, placement: BlockPlacement, ndim: int | None) -> int:
|
| 99 |
+
"""
|
| 100 |
+
If `ndim` is not provided, infer it from placement and values.
|
| 101 |
+
"""
|
| 102 |
+
if ndim is None:
|
| 103 |
+
# GH#38134 Block constructor now assumes ndim is not None
|
| 104 |
+
if not isinstance(values.dtype, np.dtype):
|
| 105 |
+
if len(placement) != 1:
|
| 106 |
+
ndim = 1
|
| 107 |
+
else:
|
| 108 |
+
ndim = 2
|
| 109 |
+
else:
|
| 110 |
+
ndim = values.ndim
|
| 111 |
+
return ndim
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def __getattr__(name: str):
|
| 115 |
+
# GH#55139
|
| 116 |
+
import warnings
|
| 117 |
+
|
| 118 |
+
if name in [
|
| 119 |
+
"Block",
|
| 120 |
+
"ExtensionBlock",
|
| 121 |
+
"DatetimeTZBlock",
|
| 122 |
+
"create_block_manager_from_blocks",
|
| 123 |
+
]:
|
| 124 |
+
# GH#33892
|
| 125 |
+
warnings.warn(
|
| 126 |
+
f"{name} is deprecated and will be removed in a future version. "
|
| 127 |
+
"Use public APIs instead.",
|
| 128 |
+
DeprecationWarning,
|
| 129 |
+
# https://github.com/pandas-dev/pandas/pull/55139#pullrequestreview-1720690758
|
| 130 |
+
# on hard-coding stacklevel
|
| 131 |
+
stacklevel=2,
|
| 132 |
+
)
|
| 133 |
+
|
| 134 |
+
if name == "create_block_manager_from_blocks":
|
| 135 |
+
from pandas.core.internals.managers import create_block_manager_from_blocks
|
| 136 |
+
|
| 137 |
+
return create_block_manager_from_blocks
|
| 138 |
+
|
| 139 |
+
elif name == "Block":
|
| 140 |
+
from pandas.core.internals.blocks import Block
|
| 141 |
+
|
| 142 |
+
return Block
|
| 143 |
+
|
| 144 |
+
elif name == "DatetimeTZBlock":
|
| 145 |
+
from pandas.core.internals.blocks import DatetimeTZBlock
|
| 146 |
+
|
| 147 |
+
return DatetimeTZBlock
|
| 148 |
+
|
| 149 |
+
elif name == "ExtensionBlock":
|
| 150 |
+
from pandas.core.internals.blocks import ExtensionBlock
|
| 151 |
+
|
| 152 |
+
return ExtensionBlock
|
| 153 |
+
|
| 154 |
+
raise AttributeError(
|
| 155 |
+
f"module 'pandas.core.internals.api' has no attribute '{name}'"
|
| 156 |
+
)
|
videollama2/lib/python3.10/site-packages/pandas/core/internals/array_manager.py
ADDED
|
@@ -0,0 +1,1340 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Experimental manager based on storing a collection of 1D arrays
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
import itertools
|
| 7 |
+
from typing import (
|
| 8 |
+
TYPE_CHECKING,
|
| 9 |
+
Callable,
|
| 10 |
+
Literal,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
import numpy as np
|
| 14 |
+
|
| 15 |
+
from pandas._libs import (
|
| 16 |
+
NaT,
|
| 17 |
+
lib,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
from pandas.core.dtypes.astype import (
|
| 21 |
+
astype_array,
|
| 22 |
+
astype_array_safe,
|
| 23 |
+
)
|
| 24 |
+
from pandas.core.dtypes.cast import (
|
| 25 |
+
ensure_dtype_can_hold_na,
|
| 26 |
+
find_common_type,
|
| 27 |
+
infer_dtype_from_scalar,
|
| 28 |
+
np_find_common_type,
|
| 29 |
+
)
|
| 30 |
+
from pandas.core.dtypes.common import (
|
| 31 |
+
ensure_platform_int,
|
| 32 |
+
is_datetime64_ns_dtype,
|
| 33 |
+
is_integer,
|
| 34 |
+
is_numeric_dtype,
|
| 35 |
+
is_object_dtype,
|
| 36 |
+
is_timedelta64_ns_dtype,
|
| 37 |
+
)
|
| 38 |
+
from pandas.core.dtypes.dtypes import ExtensionDtype
|
| 39 |
+
from pandas.core.dtypes.generic import (
|
| 40 |
+
ABCDataFrame,
|
| 41 |
+
ABCSeries,
|
| 42 |
+
)
|
| 43 |
+
from pandas.core.dtypes.missing import (
|
| 44 |
+
array_equals,
|
| 45 |
+
isna,
|
| 46 |
+
na_value_for_dtype,
|
| 47 |
+
)
|
| 48 |
+
|
| 49 |
+
import pandas.core.algorithms as algos
|
| 50 |
+
from pandas.core.array_algos.quantile import quantile_compat
|
| 51 |
+
from pandas.core.array_algos.take import take_1d
|
| 52 |
+
from pandas.core.arrays import (
|
| 53 |
+
DatetimeArray,
|
| 54 |
+
ExtensionArray,
|
| 55 |
+
NumpyExtensionArray,
|
| 56 |
+
TimedeltaArray,
|
| 57 |
+
)
|
| 58 |
+
from pandas.core.construction import (
|
| 59 |
+
ensure_wrapped_if_datetimelike,
|
| 60 |
+
extract_array,
|
| 61 |
+
sanitize_array,
|
| 62 |
+
)
|
| 63 |
+
from pandas.core.indexers import (
|
| 64 |
+
maybe_convert_indices,
|
| 65 |
+
validate_indices,
|
| 66 |
+
)
|
| 67 |
+
from pandas.core.indexes.api import (
|
| 68 |
+
Index,
|
| 69 |
+
ensure_index,
|
| 70 |
+
)
|
| 71 |
+
from pandas.core.indexes.base import get_values_for_csv
|
| 72 |
+
from pandas.core.internals.base import (
|
| 73 |
+
DataManager,
|
| 74 |
+
SingleDataManager,
|
| 75 |
+
ensure_np_dtype,
|
| 76 |
+
interleaved_dtype,
|
| 77 |
+
)
|
| 78 |
+
from pandas.core.internals.blocks import (
|
| 79 |
+
BlockPlacement,
|
| 80 |
+
ensure_block_shape,
|
| 81 |
+
external_values,
|
| 82 |
+
extract_pandas_array,
|
| 83 |
+
maybe_coerce_values,
|
| 84 |
+
new_block,
|
| 85 |
+
)
|
| 86 |
+
from pandas.core.internals.managers import make_na_array
|
| 87 |
+
|
| 88 |
+
if TYPE_CHECKING:
|
| 89 |
+
from collections.abc import Hashable
|
| 90 |
+
|
| 91 |
+
from pandas._typing import (
|
| 92 |
+
ArrayLike,
|
| 93 |
+
AxisInt,
|
| 94 |
+
DtypeObj,
|
| 95 |
+
QuantileInterpolation,
|
| 96 |
+
Self,
|
| 97 |
+
npt,
|
| 98 |
+
)
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class BaseArrayManager(DataManager):
|
| 102 |
+
"""
|
| 103 |
+
Core internal data structure to implement DataFrame and Series.
|
| 104 |
+
|
| 105 |
+
Alternative to the BlockManager, storing a list of 1D arrays instead of
|
| 106 |
+
Blocks.
|
| 107 |
+
|
| 108 |
+
This is *not* a public API class
|
| 109 |
+
|
| 110 |
+
Parameters
|
| 111 |
+
----------
|
| 112 |
+
arrays : Sequence of arrays
|
| 113 |
+
axes : Sequence of Index
|
| 114 |
+
verify_integrity : bool, default True
|
| 115 |
+
|
| 116 |
+
"""
|
| 117 |
+
|
| 118 |
+
__slots__ = [
|
| 119 |
+
"_axes", # private attribute, because 'axes' has different order, see below
|
| 120 |
+
"arrays",
|
| 121 |
+
]
|
| 122 |
+
|
| 123 |
+
arrays: list[np.ndarray | ExtensionArray]
|
| 124 |
+
_axes: list[Index]
|
| 125 |
+
|
| 126 |
+
def __init__(
|
| 127 |
+
self,
|
| 128 |
+
arrays: list[np.ndarray | ExtensionArray],
|
| 129 |
+
axes: list[Index],
|
| 130 |
+
verify_integrity: bool = True,
|
| 131 |
+
) -> None:
|
| 132 |
+
raise NotImplementedError
|
| 133 |
+
|
| 134 |
+
def make_empty(self, axes=None) -> Self:
|
| 135 |
+
"""Return an empty ArrayManager with the items axis of len 0 (no columns)"""
|
| 136 |
+
if axes is None:
|
| 137 |
+
axes = [self.axes[1:], Index([])]
|
| 138 |
+
|
| 139 |
+
arrays: list[np.ndarray | ExtensionArray] = []
|
| 140 |
+
return type(self)(arrays, axes)
|
| 141 |
+
|
| 142 |
+
@property
|
| 143 |
+
def items(self) -> Index:
|
| 144 |
+
return self._axes[-1]
|
| 145 |
+
|
| 146 |
+
@property
|
| 147 |
+
# error: Signature of "axes" incompatible with supertype "DataManager"
|
| 148 |
+
def axes(self) -> list[Index]: # type: ignore[override]
|
| 149 |
+
# mypy doesn't work to override attribute with property
|
| 150 |
+
# see https://github.com/python/mypy/issues/4125
|
| 151 |
+
"""Axes is BlockManager-compatible order (columns, rows)"""
|
| 152 |
+
return [self._axes[1], self._axes[0]]
|
| 153 |
+
|
| 154 |
+
@property
|
| 155 |
+
def shape_proper(self) -> tuple[int, ...]:
|
| 156 |
+
# this returns (n_rows, n_columns)
|
| 157 |
+
return tuple(len(ax) for ax in self._axes)
|
| 158 |
+
|
| 159 |
+
@staticmethod
|
| 160 |
+
def _normalize_axis(axis: AxisInt) -> int:
|
| 161 |
+
# switch axis
|
| 162 |
+
axis = 1 if axis == 0 else 0
|
| 163 |
+
return axis
|
| 164 |
+
|
| 165 |
+
def set_axis(self, axis: AxisInt, new_labels: Index) -> None:
|
| 166 |
+
# Caller is responsible for ensuring we have an Index object.
|
| 167 |
+
self._validate_set_axis(axis, new_labels)
|
| 168 |
+
axis = self._normalize_axis(axis)
|
| 169 |
+
self._axes[axis] = new_labels
|
| 170 |
+
|
| 171 |
+
def get_dtypes(self) -> npt.NDArray[np.object_]:
|
| 172 |
+
return np.array([arr.dtype for arr in self.arrays], dtype="object")
|
| 173 |
+
|
| 174 |
+
def add_references(self, mgr: BaseArrayManager) -> None:
|
| 175 |
+
"""
|
| 176 |
+
Only implemented on the BlockManager level
|
| 177 |
+
"""
|
| 178 |
+
return
|
| 179 |
+
|
| 180 |
+
def __getstate__(self):
|
| 181 |
+
return self.arrays, self._axes
|
| 182 |
+
|
| 183 |
+
def __setstate__(self, state) -> None:
|
| 184 |
+
self.arrays = state[0]
|
| 185 |
+
self._axes = state[1]
|
| 186 |
+
|
| 187 |
+
def __repr__(self) -> str:
|
| 188 |
+
output = type(self).__name__
|
| 189 |
+
output += f"\nIndex: {self._axes[0]}"
|
| 190 |
+
if self.ndim == 2:
|
| 191 |
+
output += f"\nColumns: {self._axes[1]}"
|
| 192 |
+
output += f"\n{len(self.arrays)} arrays:"
|
| 193 |
+
for arr in self.arrays:
|
| 194 |
+
output += f"\n{arr.dtype}"
|
| 195 |
+
return output
|
| 196 |
+
|
| 197 |
+
def apply(
|
| 198 |
+
self,
|
| 199 |
+
f,
|
| 200 |
+
align_keys: list[str] | None = None,
|
| 201 |
+
**kwargs,
|
| 202 |
+
) -> Self:
|
| 203 |
+
"""
|
| 204 |
+
Iterate over the arrays, collect and create a new ArrayManager.
|
| 205 |
+
|
| 206 |
+
Parameters
|
| 207 |
+
----------
|
| 208 |
+
f : str or callable
|
| 209 |
+
Name of the Array method to apply.
|
| 210 |
+
align_keys: List[str] or None, default None
|
| 211 |
+
**kwargs
|
| 212 |
+
Keywords to pass to `f`
|
| 213 |
+
|
| 214 |
+
Returns
|
| 215 |
+
-------
|
| 216 |
+
ArrayManager
|
| 217 |
+
"""
|
| 218 |
+
assert "filter" not in kwargs
|
| 219 |
+
|
| 220 |
+
align_keys = align_keys or []
|
| 221 |
+
result_arrays: list[ArrayLike] = []
|
| 222 |
+
# fillna: Series/DataFrame is responsible for making sure value is aligned
|
| 223 |
+
|
| 224 |
+
aligned_args = {k: kwargs[k] for k in align_keys}
|
| 225 |
+
|
| 226 |
+
if f == "apply":
|
| 227 |
+
f = kwargs.pop("func")
|
| 228 |
+
|
| 229 |
+
for i, arr in enumerate(self.arrays):
|
| 230 |
+
if aligned_args:
|
| 231 |
+
for k, obj in aligned_args.items():
|
| 232 |
+
if isinstance(obj, (ABCSeries, ABCDataFrame)):
|
| 233 |
+
# The caller is responsible for ensuring that
|
| 234 |
+
# obj.axes[-1].equals(self.items)
|
| 235 |
+
if obj.ndim == 1:
|
| 236 |
+
kwargs[k] = obj.iloc[i]
|
| 237 |
+
else:
|
| 238 |
+
kwargs[k] = obj.iloc[:, i]._values
|
| 239 |
+
else:
|
| 240 |
+
# otherwise we have an array-like
|
| 241 |
+
kwargs[k] = obj[i]
|
| 242 |
+
|
| 243 |
+
if callable(f):
|
| 244 |
+
applied = f(arr, **kwargs)
|
| 245 |
+
else:
|
| 246 |
+
applied = getattr(arr, f)(**kwargs)
|
| 247 |
+
|
| 248 |
+
result_arrays.append(applied)
|
| 249 |
+
|
| 250 |
+
new_axes = self._axes
|
| 251 |
+
return type(self)(result_arrays, new_axes)
|
| 252 |
+
|
| 253 |
+
def apply_with_block(self, f, align_keys=None, **kwargs) -> Self:
|
| 254 |
+
# switch axis to follow BlockManager logic
|
| 255 |
+
swap_axis = True
|
| 256 |
+
if f == "interpolate":
|
| 257 |
+
swap_axis = False
|
| 258 |
+
if swap_axis and "axis" in kwargs and self.ndim == 2:
|
| 259 |
+
kwargs["axis"] = 1 if kwargs["axis"] == 0 else 0
|
| 260 |
+
|
| 261 |
+
align_keys = align_keys or []
|
| 262 |
+
aligned_args = {k: kwargs[k] for k in align_keys}
|
| 263 |
+
|
| 264 |
+
result_arrays = []
|
| 265 |
+
|
| 266 |
+
for i, arr in enumerate(self.arrays):
|
| 267 |
+
if aligned_args:
|
| 268 |
+
for k, obj in aligned_args.items():
|
| 269 |
+
if isinstance(obj, (ABCSeries, ABCDataFrame)):
|
| 270 |
+
# The caller is responsible for ensuring that
|
| 271 |
+
# obj.axes[-1].equals(self.items)
|
| 272 |
+
if obj.ndim == 1:
|
| 273 |
+
if self.ndim == 2:
|
| 274 |
+
kwargs[k] = obj.iloc[slice(i, i + 1)]._values
|
| 275 |
+
else:
|
| 276 |
+
kwargs[k] = obj.iloc[:]._values
|
| 277 |
+
else:
|
| 278 |
+
kwargs[k] = obj.iloc[:, [i]]._values
|
| 279 |
+
else:
|
| 280 |
+
# otherwise we have an ndarray
|
| 281 |
+
if obj.ndim == 2:
|
| 282 |
+
kwargs[k] = obj[[i]]
|
| 283 |
+
|
| 284 |
+
if isinstance(arr.dtype, np.dtype) and not isinstance(arr, np.ndarray):
|
| 285 |
+
# i.e. TimedeltaArray, DatetimeArray with tz=None. Need to
|
| 286 |
+
# convert for the Block constructors.
|
| 287 |
+
arr = np.asarray(arr)
|
| 288 |
+
|
| 289 |
+
arr = maybe_coerce_values(arr)
|
| 290 |
+
if self.ndim == 2:
|
| 291 |
+
arr = ensure_block_shape(arr, 2)
|
| 292 |
+
bp = BlockPlacement(slice(0, 1, 1))
|
| 293 |
+
block = new_block(arr, placement=bp, ndim=2)
|
| 294 |
+
else:
|
| 295 |
+
bp = BlockPlacement(slice(0, len(self), 1))
|
| 296 |
+
block = new_block(arr, placement=bp, ndim=1)
|
| 297 |
+
|
| 298 |
+
applied = getattr(block, f)(**kwargs)
|
| 299 |
+
if isinstance(applied, list):
|
| 300 |
+
applied = applied[0]
|
| 301 |
+
arr = applied.values
|
| 302 |
+
if self.ndim == 2 and arr.ndim == 2:
|
| 303 |
+
# 2D for np.ndarray or DatetimeArray/TimedeltaArray
|
| 304 |
+
assert len(arr) == 1
|
| 305 |
+
# error: No overload variant of "__getitem__" of "ExtensionArray"
|
| 306 |
+
# matches argument type "Tuple[int, slice]"
|
| 307 |
+
arr = arr[0, :] # type: ignore[call-overload]
|
| 308 |
+
result_arrays.append(arr)
|
| 309 |
+
|
| 310 |
+
return type(self)(result_arrays, self._axes)
|
| 311 |
+
|
| 312 |
+
def setitem(self, indexer, value, warn: bool = True) -> Self:
|
| 313 |
+
return self.apply_with_block("setitem", indexer=indexer, value=value)
|
| 314 |
+
|
| 315 |
+
def diff(self, n: int) -> Self:
|
| 316 |
+
assert self.ndim == 2 # caller ensures
|
| 317 |
+
return self.apply(algos.diff, n=n)
|
| 318 |
+
|
| 319 |
+
def astype(self, dtype, copy: bool | None = False, errors: str = "raise") -> Self:
|
| 320 |
+
if copy is None:
|
| 321 |
+
copy = True
|
| 322 |
+
|
| 323 |
+
return self.apply(astype_array_safe, dtype=dtype, copy=copy, errors=errors)
|
| 324 |
+
|
| 325 |
+
def convert(self, copy: bool | None) -> Self:
|
| 326 |
+
if copy is None:
|
| 327 |
+
copy = True
|
| 328 |
+
|
| 329 |
+
def _convert(arr):
|
| 330 |
+
if is_object_dtype(arr.dtype):
|
| 331 |
+
# extract NumpyExtensionArray for tests that patch
|
| 332 |
+
# NumpyExtensionArray._typ
|
| 333 |
+
arr = np.asarray(arr)
|
| 334 |
+
result = lib.maybe_convert_objects(
|
| 335 |
+
arr,
|
| 336 |
+
convert_non_numeric=True,
|
| 337 |
+
)
|
| 338 |
+
if result is arr and copy:
|
| 339 |
+
return arr.copy()
|
| 340 |
+
return result
|
| 341 |
+
else:
|
| 342 |
+
return arr.copy() if copy else arr
|
| 343 |
+
|
| 344 |
+
return self.apply(_convert)
|
| 345 |
+
|
| 346 |
+
def get_values_for_csv(
|
| 347 |
+
self, *, float_format, date_format, decimal, na_rep: str = "nan", quoting=None
|
| 348 |
+
) -> Self:
|
| 349 |
+
return self.apply(
|
| 350 |
+
get_values_for_csv,
|
| 351 |
+
na_rep=na_rep,
|
| 352 |
+
quoting=quoting,
|
| 353 |
+
float_format=float_format,
|
| 354 |
+
date_format=date_format,
|
| 355 |
+
decimal=decimal,
|
| 356 |
+
)
|
| 357 |
+
|
| 358 |
+
@property
|
| 359 |
+
def any_extension_types(self) -> bool:
|
| 360 |
+
"""Whether any of the blocks in this manager are extension blocks"""
|
| 361 |
+
return False # any(block.is_extension for block in self.blocks)
|
| 362 |
+
|
| 363 |
+
@property
|
| 364 |
+
def is_view(self) -> bool:
|
| 365 |
+
"""return a boolean if we are a single block and are a view"""
|
| 366 |
+
# TODO what is this used for?
|
| 367 |
+
return False
|
| 368 |
+
|
| 369 |
+
@property
|
| 370 |
+
def is_single_block(self) -> bool:
|
| 371 |
+
return len(self.arrays) == 1
|
| 372 |
+
|
| 373 |
+
def _get_data_subset(self, predicate: Callable) -> Self:
|
| 374 |
+
indices = [i for i, arr in enumerate(self.arrays) if predicate(arr)]
|
| 375 |
+
arrays = [self.arrays[i] for i in indices]
|
| 376 |
+
# TODO copy?
|
| 377 |
+
# Note: using Index.take ensures we can retain e.g. DatetimeIndex.freq,
|
| 378 |
+
# see test_describe_datetime_columns
|
| 379 |
+
taker = np.array(indices, dtype="intp")
|
| 380 |
+
new_cols = self._axes[1].take(taker)
|
| 381 |
+
new_axes = [self._axes[0], new_cols]
|
| 382 |
+
return type(self)(arrays, new_axes, verify_integrity=False)
|
| 383 |
+
|
| 384 |
+
def get_bool_data(self, copy: bool = False) -> Self:
|
| 385 |
+
"""
|
| 386 |
+
Select columns that are bool-dtype and object-dtype columns that are all-bool.
|
| 387 |
+
|
| 388 |
+
Parameters
|
| 389 |
+
----------
|
| 390 |
+
copy : bool, default False
|
| 391 |
+
Whether to copy the blocks
|
| 392 |
+
"""
|
| 393 |
+
return self._get_data_subset(lambda x: x.dtype == np.dtype(bool))
|
| 394 |
+
|
| 395 |
+
def get_numeric_data(self, copy: bool = False) -> Self:
|
| 396 |
+
"""
|
| 397 |
+
Select columns that have a numeric dtype.
|
| 398 |
+
|
| 399 |
+
Parameters
|
| 400 |
+
----------
|
| 401 |
+
copy : bool, default False
|
| 402 |
+
Whether to copy the blocks
|
| 403 |
+
"""
|
| 404 |
+
return self._get_data_subset(
|
| 405 |
+
lambda arr: is_numeric_dtype(arr.dtype)
|
| 406 |
+
or getattr(arr.dtype, "_is_numeric", False)
|
| 407 |
+
)
|
| 408 |
+
|
| 409 |
+
def copy(self, deep: bool | Literal["all"] | None = True) -> Self:
|
| 410 |
+
"""
|
| 411 |
+
Make deep or shallow copy of ArrayManager
|
| 412 |
+
|
| 413 |
+
Parameters
|
| 414 |
+
----------
|
| 415 |
+
deep : bool or string, default True
|
| 416 |
+
If False, return shallow copy (do not copy data)
|
| 417 |
+
If 'all', copy data and a deep copy of the index
|
| 418 |
+
|
| 419 |
+
Returns
|
| 420 |
+
-------
|
| 421 |
+
BlockManager
|
| 422 |
+
"""
|
| 423 |
+
if deep is None:
|
| 424 |
+
# ArrayManager does not yet support CoW, so deep=None always means
|
| 425 |
+
# deep=True for now
|
| 426 |
+
deep = True
|
| 427 |
+
|
| 428 |
+
# this preserves the notion of view copying of axes
|
| 429 |
+
if deep:
|
| 430 |
+
# hit in e.g. tests.io.json.test_pandas
|
| 431 |
+
|
| 432 |
+
def copy_func(ax):
|
| 433 |
+
return ax.copy(deep=True) if deep == "all" else ax.view()
|
| 434 |
+
|
| 435 |
+
new_axes = [copy_func(ax) for ax in self._axes]
|
| 436 |
+
else:
|
| 437 |
+
new_axes = list(self._axes)
|
| 438 |
+
|
| 439 |
+
if deep:
|
| 440 |
+
new_arrays = [arr.copy() for arr in self.arrays]
|
| 441 |
+
else:
|
| 442 |
+
new_arrays = list(self.arrays)
|
| 443 |
+
return type(self)(new_arrays, new_axes, verify_integrity=False)
|
| 444 |
+
|
| 445 |
+
def reindex_indexer(
|
| 446 |
+
self,
|
| 447 |
+
new_axis,
|
| 448 |
+
indexer,
|
| 449 |
+
axis: AxisInt,
|
| 450 |
+
fill_value=None,
|
| 451 |
+
allow_dups: bool = False,
|
| 452 |
+
copy: bool | None = True,
|
| 453 |
+
# ignored keywords
|
| 454 |
+
only_slice: bool = False,
|
| 455 |
+
# ArrayManager specific keywords
|
| 456 |
+
use_na_proxy: bool = False,
|
| 457 |
+
) -> Self:
|
| 458 |
+
axis = self._normalize_axis(axis)
|
| 459 |
+
return self._reindex_indexer(
|
| 460 |
+
new_axis,
|
| 461 |
+
indexer,
|
| 462 |
+
axis,
|
| 463 |
+
fill_value,
|
| 464 |
+
allow_dups,
|
| 465 |
+
copy,
|
| 466 |
+
use_na_proxy,
|
| 467 |
+
)
|
| 468 |
+
|
| 469 |
+
def _reindex_indexer(
|
| 470 |
+
self,
|
| 471 |
+
new_axis,
|
| 472 |
+
indexer: npt.NDArray[np.intp] | None,
|
| 473 |
+
axis: AxisInt,
|
| 474 |
+
fill_value=None,
|
| 475 |
+
allow_dups: bool = False,
|
| 476 |
+
copy: bool | None = True,
|
| 477 |
+
use_na_proxy: bool = False,
|
| 478 |
+
) -> Self:
|
| 479 |
+
"""
|
| 480 |
+
Parameters
|
| 481 |
+
----------
|
| 482 |
+
new_axis : Index
|
| 483 |
+
indexer : ndarray[intp] or None
|
| 484 |
+
axis : int
|
| 485 |
+
fill_value : object, default None
|
| 486 |
+
allow_dups : bool, default False
|
| 487 |
+
copy : bool, default True
|
| 488 |
+
|
| 489 |
+
|
| 490 |
+
pandas-indexer with -1's only.
|
| 491 |
+
"""
|
| 492 |
+
if copy is None:
|
| 493 |
+
# ArrayManager does not yet support CoW, so deep=None always means
|
| 494 |
+
# deep=True for now
|
| 495 |
+
copy = True
|
| 496 |
+
|
| 497 |
+
if indexer is None:
|
| 498 |
+
if new_axis is self._axes[axis] and not copy:
|
| 499 |
+
return self
|
| 500 |
+
|
| 501 |
+
result = self.copy(deep=copy)
|
| 502 |
+
result._axes = list(self._axes)
|
| 503 |
+
result._axes[axis] = new_axis
|
| 504 |
+
return result
|
| 505 |
+
|
| 506 |
+
# some axes don't allow reindexing with dups
|
| 507 |
+
if not allow_dups:
|
| 508 |
+
self._axes[axis]._validate_can_reindex(indexer)
|
| 509 |
+
|
| 510 |
+
if axis >= self.ndim:
|
| 511 |
+
raise IndexError("Requested axis not found in manager")
|
| 512 |
+
|
| 513 |
+
if axis == 1:
|
| 514 |
+
new_arrays = []
|
| 515 |
+
for i in indexer:
|
| 516 |
+
if i == -1:
|
| 517 |
+
arr = self._make_na_array(
|
| 518 |
+
fill_value=fill_value, use_na_proxy=use_na_proxy
|
| 519 |
+
)
|
| 520 |
+
else:
|
| 521 |
+
arr = self.arrays[i]
|
| 522 |
+
if copy:
|
| 523 |
+
arr = arr.copy()
|
| 524 |
+
new_arrays.append(arr)
|
| 525 |
+
|
| 526 |
+
else:
|
| 527 |
+
validate_indices(indexer, len(self._axes[0]))
|
| 528 |
+
indexer = ensure_platform_int(indexer)
|
| 529 |
+
mask = indexer == -1
|
| 530 |
+
needs_masking = mask.any()
|
| 531 |
+
new_arrays = [
|
| 532 |
+
take_1d(
|
| 533 |
+
arr,
|
| 534 |
+
indexer,
|
| 535 |
+
allow_fill=needs_masking,
|
| 536 |
+
fill_value=fill_value,
|
| 537 |
+
mask=mask,
|
| 538 |
+
# if fill_value is not None else blk.fill_value
|
| 539 |
+
)
|
| 540 |
+
for arr in self.arrays
|
| 541 |
+
]
|
| 542 |
+
|
| 543 |
+
new_axes = list(self._axes)
|
| 544 |
+
new_axes[axis] = new_axis
|
| 545 |
+
|
| 546 |
+
return type(self)(new_arrays, new_axes, verify_integrity=False)
|
| 547 |
+
|
| 548 |
+
def take(
|
| 549 |
+
self,
|
| 550 |
+
indexer: npt.NDArray[np.intp],
|
| 551 |
+
axis: AxisInt = 1,
|
| 552 |
+
verify: bool = True,
|
| 553 |
+
) -> Self:
|
| 554 |
+
"""
|
| 555 |
+
Take items along any axis.
|
| 556 |
+
"""
|
| 557 |
+
assert isinstance(indexer, np.ndarray), type(indexer)
|
| 558 |
+
assert indexer.dtype == np.intp, indexer.dtype
|
| 559 |
+
|
| 560 |
+
axis = self._normalize_axis(axis)
|
| 561 |
+
|
| 562 |
+
if not indexer.ndim == 1:
|
| 563 |
+
raise ValueError("indexer should be 1-dimensional")
|
| 564 |
+
|
| 565 |
+
n = self.shape_proper[axis]
|
| 566 |
+
indexer = maybe_convert_indices(indexer, n, verify=verify)
|
| 567 |
+
|
| 568 |
+
new_labels = self._axes[axis].take(indexer)
|
| 569 |
+
return self._reindex_indexer(
|
| 570 |
+
new_axis=new_labels, indexer=indexer, axis=axis, allow_dups=True
|
| 571 |
+
)
|
| 572 |
+
|
| 573 |
+
def _make_na_array(self, fill_value=None, use_na_proxy: bool = False):
|
| 574 |
+
if use_na_proxy:
|
| 575 |
+
assert fill_value is None
|
| 576 |
+
return NullArrayProxy(self.shape_proper[0])
|
| 577 |
+
|
| 578 |
+
if fill_value is None:
|
| 579 |
+
fill_value = np.nan
|
| 580 |
+
|
| 581 |
+
dtype, fill_value = infer_dtype_from_scalar(fill_value)
|
| 582 |
+
array_values = make_na_array(dtype, self.shape_proper[:1], fill_value)
|
| 583 |
+
return array_values
|
| 584 |
+
|
| 585 |
+
def _equal_values(self, other) -> bool:
|
| 586 |
+
"""
|
| 587 |
+
Used in .equals defined in base class. Only check the column values
|
| 588 |
+
assuming shape and indexes have already been checked.
|
| 589 |
+
"""
|
| 590 |
+
for left, right in zip(self.arrays, other.arrays):
|
| 591 |
+
if not array_equals(left, right):
|
| 592 |
+
return False
|
| 593 |
+
return True
|
| 594 |
+
|
| 595 |
+
# TODO
|
| 596 |
+
# to_dict
|
| 597 |
+
|
| 598 |
+
|
| 599 |
+
class ArrayManager(BaseArrayManager):
|
| 600 |
+
@property
|
| 601 |
+
def ndim(self) -> Literal[2]:
|
| 602 |
+
return 2
|
| 603 |
+
|
| 604 |
+
def __init__(
|
| 605 |
+
self,
|
| 606 |
+
arrays: list[np.ndarray | ExtensionArray],
|
| 607 |
+
axes: list[Index],
|
| 608 |
+
verify_integrity: bool = True,
|
| 609 |
+
) -> None:
|
| 610 |
+
# Note: we are storing the axes in "_axes" in the (row, columns) order
|
| 611 |
+
# which contrasts the order how it is stored in BlockManager
|
| 612 |
+
self._axes = axes
|
| 613 |
+
self.arrays = arrays
|
| 614 |
+
|
| 615 |
+
if verify_integrity:
|
| 616 |
+
self._axes = [ensure_index(ax) for ax in axes]
|
| 617 |
+
arrays = [extract_pandas_array(x, None, 1)[0] for x in arrays]
|
| 618 |
+
self.arrays = [maybe_coerce_values(arr) for arr in arrays]
|
| 619 |
+
self._verify_integrity()
|
| 620 |
+
|
| 621 |
+
def _verify_integrity(self) -> None:
|
| 622 |
+
n_rows, n_columns = self.shape_proper
|
| 623 |
+
if not len(self.arrays) == n_columns:
|
| 624 |
+
raise ValueError(
|
| 625 |
+
"Number of passed arrays must equal the size of the column Index: "
|
| 626 |
+
f"{len(self.arrays)} arrays vs {n_columns} columns."
|
| 627 |
+
)
|
| 628 |
+
for arr in self.arrays:
|
| 629 |
+
if not len(arr) == n_rows:
|
| 630 |
+
raise ValueError(
|
| 631 |
+
"Passed arrays should have the same length as the rows Index: "
|
| 632 |
+
f"{len(arr)} vs {n_rows} rows"
|
| 633 |
+
)
|
| 634 |
+
if not isinstance(arr, (np.ndarray, ExtensionArray)):
|
| 635 |
+
raise ValueError(
|
| 636 |
+
"Passed arrays should be np.ndarray or ExtensionArray instances, "
|
| 637 |
+
f"got {type(arr)} instead"
|
| 638 |
+
)
|
| 639 |
+
if not arr.ndim == 1:
|
| 640 |
+
raise ValueError(
|
| 641 |
+
"Passed arrays should be 1-dimensional, got array with "
|
| 642 |
+
f"{arr.ndim} dimensions instead."
|
| 643 |
+
)
|
| 644 |
+
|
| 645 |
+
# --------------------------------------------------------------------
|
| 646 |
+
# Indexing
|
| 647 |
+
|
| 648 |
+
def fast_xs(self, loc: int) -> SingleArrayManager:
|
| 649 |
+
"""
|
| 650 |
+
Return the array corresponding to `frame.iloc[loc]`.
|
| 651 |
+
|
| 652 |
+
Parameters
|
| 653 |
+
----------
|
| 654 |
+
loc : int
|
| 655 |
+
|
| 656 |
+
Returns
|
| 657 |
+
-------
|
| 658 |
+
np.ndarray or ExtensionArray
|
| 659 |
+
"""
|
| 660 |
+
dtype = interleaved_dtype([arr.dtype for arr in self.arrays])
|
| 661 |
+
|
| 662 |
+
values = [arr[loc] for arr in self.arrays]
|
| 663 |
+
if isinstance(dtype, ExtensionDtype):
|
| 664 |
+
result = dtype.construct_array_type()._from_sequence(values, dtype=dtype)
|
| 665 |
+
# for datetime64/timedelta64, the np.ndarray constructor cannot handle pd.NaT
|
| 666 |
+
elif is_datetime64_ns_dtype(dtype):
|
| 667 |
+
result = DatetimeArray._from_sequence(values, dtype=dtype)._ndarray
|
| 668 |
+
elif is_timedelta64_ns_dtype(dtype):
|
| 669 |
+
result = TimedeltaArray._from_sequence(values, dtype=dtype)._ndarray
|
| 670 |
+
else:
|
| 671 |
+
result = np.array(values, dtype=dtype)
|
| 672 |
+
return SingleArrayManager([result], [self._axes[1]])
|
| 673 |
+
|
| 674 |
+
def get_slice(self, slobj: slice, axis: AxisInt = 0) -> ArrayManager:
|
| 675 |
+
axis = self._normalize_axis(axis)
|
| 676 |
+
|
| 677 |
+
if axis == 0:
|
| 678 |
+
arrays = [arr[slobj] for arr in self.arrays]
|
| 679 |
+
elif axis == 1:
|
| 680 |
+
arrays = self.arrays[slobj]
|
| 681 |
+
|
| 682 |
+
new_axes = list(self._axes)
|
| 683 |
+
new_axes[axis] = new_axes[axis]._getitem_slice(slobj)
|
| 684 |
+
|
| 685 |
+
return type(self)(arrays, new_axes, verify_integrity=False)
|
| 686 |
+
|
| 687 |
+
def iget(self, i: int) -> SingleArrayManager:
|
| 688 |
+
"""
|
| 689 |
+
Return the data as a SingleArrayManager.
|
| 690 |
+
"""
|
| 691 |
+
values = self.arrays[i]
|
| 692 |
+
return SingleArrayManager([values], [self._axes[0]])
|
| 693 |
+
|
| 694 |
+
def iget_values(self, i: int) -> ArrayLike:
|
| 695 |
+
"""
|
| 696 |
+
Return the data for column i as the values (ndarray or ExtensionArray).
|
| 697 |
+
"""
|
| 698 |
+
return self.arrays[i]
|
| 699 |
+
|
| 700 |
+
@property
|
| 701 |
+
def column_arrays(self) -> list[ArrayLike]:
|
| 702 |
+
"""
|
| 703 |
+
Used in the JSON C code to access column arrays.
|
| 704 |
+
"""
|
| 705 |
+
|
| 706 |
+
return [np.asarray(arr) for arr in self.arrays]
|
| 707 |
+
|
| 708 |
+
def iset(
|
| 709 |
+
self,
|
| 710 |
+
loc: int | slice | np.ndarray,
|
| 711 |
+
value: ArrayLike,
|
| 712 |
+
inplace: bool = False,
|
| 713 |
+
refs=None,
|
| 714 |
+
) -> None:
|
| 715 |
+
"""
|
| 716 |
+
Set new column(s).
|
| 717 |
+
|
| 718 |
+
This changes the ArrayManager in-place, but replaces (an) existing
|
| 719 |
+
column(s), not changing column values in-place).
|
| 720 |
+
|
| 721 |
+
Parameters
|
| 722 |
+
----------
|
| 723 |
+
loc : integer, slice or boolean mask
|
| 724 |
+
Positional location (already bounds checked)
|
| 725 |
+
value : np.ndarray or ExtensionArray
|
| 726 |
+
inplace : bool, default False
|
| 727 |
+
Whether overwrite existing array as opposed to replacing it.
|
| 728 |
+
"""
|
| 729 |
+
# single column -> single integer index
|
| 730 |
+
if lib.is_integer(loc):
|
| 731 |
+
# TODO can we avoid needing to unpack this here? That means converting
|
| 732 |
+
# DataFrame into 1D array when loc is an integer
|
| 733 |
+
if isinstance(value, np.ndarray) and value.ndim == 2:
|
| 734 |
+
assert value.shape[1] == 1
|
| 735 |
+
value = value[:, 0]
|
| 736 |
+
|
| 737 |
+
# TODO we receive a datetime/timedelta64 ndarray from DataFrame._iset_item
|
| 738 |
+
# but we should avoid that and pass directly the proper array
|
| 739 |
+
value = maybe_coerce_values(value)
|
| 740 |
+
|
| 741 |
+
assert isinstance(value, (np.ndarray, ExtensionArray))
|
| 742 |
+
assert value.ndim == 1
|
| 743 |
+
assert len(value) == len(self._axes[0])
|
| 744 |
+
self.arrays[loc] = value
|
| 745 |
+
return
|
| 746 |
+
|
| 747 |
+
# multiple columns -> convert slice or array to integer indices
|
| 748 |
+
elif isinstance(loc, slice):
|
| 749 |
+
indices: range | np.ndarray = range(
|
| 750 |
+
loc.start if loc.start is not None else 0,
|
| 751 |
+
loc.stop if loc.stop is not None else self.shape_proper[1],
|
| 752 |
+
loc.step if loc.step is not None else 1,
|
| 753 |
+
)
|
| 754 |
+
else:
|
| 755 |
+
assert isinstance(loc, np.ndarray)
|
| 756 |
+
assert loc.dtype == "bool"
|
| 757 |
+
indices = np.nonzero(loc)[0]
|
| 758 |
+
|
| 759 |
+
assert value.ndim == 2
|
| 760 |
+
assert value.shape[0] == len(self._axes[0])
|
| 761 |
+
|
| 762 |
+
for value_idx, mgr_idx in enumerate(indices):
|
| 763 |
+
# error: No overload variant of "__getitem__" of "ExtensionArray" matches
|
| 764 |
+
# argument type "Tuple[slice, int]"
|
| 765 |
+
value_arr = value[:, value_idx] # type: ignore[call-overload]
|
| 766 |
+
self.arrays[mgr_idx] = value_arr
|
| 767 |
+
return
|
| 768 |
+
|
| 769 |
+
def column_setitem(
|
| 770 |
+
self, loc: int, idx: int | slice | np.ndarray, value, inplace_only: bool = False
|
| 771 |
+
) -> None:
|
| 772 |
+
"""
|
| 773 |
+
Set values ("setitem") into a single column (not setting the full column).
|
| 774 |
+
|
| 775 |
+
This is a method on the ArrayManager level, to avoid creating an
|
| 776 |
+
intermediate Series at the DataFrame level (`s = df[loc]; s[idx] = value`)
|
| 777 |
+
"""
|
| 778 |
+
if not is_integer(loc):
|
| 779 |
+
raise TypeError("The column index should be an integer")
|
| 780 |
+
arr = self.arrays[loc]
|
| 781 |
+
mgr = SingleArrayManager([arr], [self._axes[0]])
|
| 782 |
+
if inplace_only:
|
| 783 |
+
mgr.setitem_inplace(idx, value)
|
| 784 |
+
else:
|
| 785 |
+
new_mgr = mgr.setitem((idx,), value)
|
| 786 |
+
# update existing ArrayManager in-place
|
| 787 |
+
self.arrays[loc] = new_mgr.arrays[0]
|
| 788 |
+
|
| 789 |
+
def insert(self, loc: int, item: Hashable, value: ArrayLike, refs=None) -> None:
|
| 790 |
+
"""
|
| 791 |
+
Insert item at selected position.
|
| 792 |
+
|
| 793 |
+
Parameters
|
| 794 |
+
----------
|
| 795 |
+
loc : int
|
| 796 |
+
item : hashable
|
| 797 |
+
value : np.ndarray or ExtensionArray
|
| 798 |
+
"""
|
| 799 |
+
# insert to the axis; this could possibly raise a TypeError
|
| 800 |
+
new_axis = self.items.insert(loc, item)
|
| 801 |
+
|
| 802 |
+
value = extract_array(value, extract_numpy=True)
|
| 803 |
+
if value.ndim == 2:
|
| 804 |
+
if value.shape[0] == 1:
|
| 805 |
+
# error: No overload variant of "__getitem__" of "ExtensionArray"
|
| 806 |
+
# matches argument type "Tuple[int, slice]"
|
| 807 |
+
value = value[0, :] # type: ignore[call-overload]
|
| 808 |
+
else:
|
| 809 |
+
raise ValueError(
|
| 810 |
+
f"Expected a 1D array, got an array with shape {value.shape}"
|
| 811 |
+
)
|
| 812 |
+
value = maybe_coerce_values(value)
|
| 813 |
+
|
| 814 |
+
# TODO self.arrays can be empty
|
| 815 |
+
# assert len(value) == len(self.arrays[0])
|
| 816 |
+
|
| 817 |
+
# TODO is this copy needed?
|
| 818 |
+
arrays = self.arrays.copy()
|
| 819 |
+
arrays.insert(loc, value)
|
| 820 |
+
|
| 821 |
+
self.arrays = arrays
|
| 822 |
+
self._axes[1] = new_axis
|
| 823 |
+
|
| 824 |
+
def idelete(self, indexer) -> ArrayManager:
|
| 825 |
+
"""
|
| 826 |
+
Delete selected locations in-place (new block and array, same BlockManager)
|
| 827 |
+
"""
|
| 828 |
+
to_keep = np.ones(self.shape[0], dtype=np.bool_)
|
| 829 |
+
to_keep[indexer] = False
|
| 830 |
+
|
| 831 |
+
self.arrays = [self.arrays[i] for i in np.nonzero(to_keep)[0]]
|
| 832 |
+
self._axes = [self._axes[0], self._axes[1][to_keep]]
|
| 833 |
+
return self
|
| 834 |
+
|
| 835 |
+
# --------------------------------------------------------------------
|
| 836 |
+
# Array-wise Operation
|
| 837 |
+
|
| 838 |
+
def grouped_reduce(self, func: Callable) -> Self:
|
| 839 |
+
"""
|
| 840 |
+
Apply grouped reduction function columnwise, returning a new ArrayManager.
|
| 841 |
+
|
| 842 |
+
Parameters
|
| 843 |
+
----------
|
| 844 |
+
func : grouped reduction function
|
| 845 |
+
|
| 846 |
+
Returns
|
| 847 |
+
-------
|
| 848 |
+
ArrayManager
|
| 849 |
+
"""
|
| 850 |
+
result_arrays: list[np.ndarray] = []
|
| 851 |
+
result_indices: list[int] = []
|
| 852 |
+
|
| 853 |
+
for i, arr in enumerate(self.arrays):
|
| 854 |
+
# grouped_reduce functions all expect 2D arrays
|
| 855 |
+
arr = ensure_block_shape(arr, ndim=2)
|
| 856 |
+
res = func(arr)
|
| 857 |
+
if res.ndim == 2:
|
| 858 |
+
# reverse of ensure_block_shape
|
| 859 |
+
assert res.shape[0] == 1
|
| 860 |
+
res = res[0]
|
| 861 |
+
|
| 862 |
+
result_arrays.append(res)
|
| 863 |
+
result_indices.append(i)
|
| 864 |
+
|
| 865 |
+
if len(result_arrays) == 0:
|
| 866 |
+
nrows = 0
|
| 867 |
+
else:
|
| 868 |
+
nrows = result_arrays[0].shape[0]
|
| 869 |
+
index = Index(range(nrows))
|
| 870 |
+
|
| 871 |
+
columns = self.items
|
| 872 |
+
|
| 873 |
+
# error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]";
|
| 874 |
+
# expected "List[Union[ndarray, ExtensionArray]]"
|
| 875 |
+
return type(self)(result_arrays, [index, columns]) # type: ignore[arg-type]
|
| 876 |
+
|
| 877 |
+
def reduce(self, func: Callable) -> Self:
|
| 878 |
+
"""
|
| 879 |
+
Apply reduction function column-wise, returning a single-row ArrayManager.
|
| 880 |
+
|
| 881 |
+
Parameters
|
| 882 |
+
----------
|
| 883 |
+
func : reduction function
|
| 884 |
+
|
| 885 |
+
Returns
|
| 886 |
+
-------
|
| 887 |
+
ArrayManager
|
| 888 |
+
"""
|
| 889 |
+
result_arrays: list[np.ndarray] = []
|
| 890 |
+
for i, arr in enumerate(self.arrays):
|
| 891 |
+
res = func(arr, axis=0)
|
| 892 |
+
|
| 893 |
+
# TODO NaT doesn't preserve dtype, so we need to ensure to create
|
| 894 |
+
# a timedelta result array if original was timedelta
|
| 895 |
+
# what if datetime results in timedelta? (eg std)
|
| 896 |
+
dtype = arr.dtype if res is NaT else None
|
| 897 |
+
result_arrays.append(
|
| 898 |
+
sanitize_array([res], None, dtype=dtype) # type: ignore[arg-type]
|
| 899 |
+
)
|
| 900 |
+
|
| 901 |
+
index = Index._simple_new(np.array([None], dtype=object)) # placeholder
|
| 902 |
+
columns = self.items
|
| 903 |
+
|
| 904 |
+
# error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]";
|
| 905 |
+
# expected "List[Union[ndarray, ExtensionArray]]"
|
| 906 |
+
new_mgr = type(self)(result_arrays, [index, columns]) # type: ignore[arg-type]
|
| 907 |
+
return new_mgr
|
| 908 |
+
|
| 909 |
+
def operate_blockwise(self, other: ArrayManager, array_op) -> ArrayManager:
|
| 910 |
+
"""
|
| 911 |
+
Apply array_op blockwise with another (aligned) BlockManager.
|
| 912 |
+
"""
|
| 913 |
+
# TODO what if `other` is BlockManager ?
|
| 914 |
+
left_arrays = self.arrays
|
| 915 |
+
right_arrays = other.arrays
|
| 916 |
+
result_arrays = [
|
| 917 |
+
array_op(left, right) for left, right in zip(left_arrays, right_arrays)
|
| 918 |
+
]
|
| 919 |
+
return type(self)(result_arrays, self._axes)
|
| 920 |
+
|
| 921 |
+
def quantile(
|
| 922 |
+
self,
|
| 923 |
+
*,
|
| 924 |
+
qs: Index, # with dtype float64
|
| 925 |
+
transposed: bool = False,
|
| 926 |
+
interpolation: QuantileInterpolation = "linear",
|
| 927 |
+
) -> ArrayManager:
|
| 928 |
+
arrs = [ensure_block_shape(x, 2) for x in self.arrays]
|
| 929 |
+
new_arrs = [
|
| 930 |
+
quantile_compat(x, np.asarray(qs._values), interpolation) for x in arrs
|
| 931 |
+
]
|
| 932 |
+
for i, arr in enumerate(new_arrs):
|
| 933 |
+
if arr.ndim == 2:
|
| 934 |
+
assert arr.shape[0] == 1, arr.shape
|
| 935 |
+
new_arrs[i] = arr[0]
|
| 936 |
+
|
| 937 |
+
axes = [qs, self._axes[1]]
|
| 938 |
+
return type(self)(new_arrs, axes)
|
| 939 |
+
|
| 940 |
+
# ----------------------------------------------------------------
|
| 941 |
+
|
| 942 |
+
def unstack(self, unstacker, fill_value) -> ArrayManager:
|
| 943 |
+
"""
|
| 944 |
+
Return a BlockManager with all blocks unstacked.
|
| 945 |
+
|
| 946 |
+
Parameters
|
| 947 |
+
----------
|
| 948 |
+
unstacker : reshape._Unstacker
|
| 949 |
+
fill_value : Any
|
| 950 |
+
fill_value for newly introduced missing values.
|
| 951 |
+
|
| 952 |
+
Returns
|
| 953 |
+
-------
|
| 954 |
+
unstacked : BlockManager
|
| 955 |
+
"""
|
| 956 |
+
indexer, _ = unstacker._indexer_and_to_sort
|
| 957 |
+
if unstacker.mask.all():
|
| 958 |
+
new_indexer = indexer
|
| 959 |
+
allow_fill = False
|
| 960 |
+
new_mask2D = None
|
| 961 |
+
needs_masking = None
|
| 962 |
+
else:
|
| 963 |
+
new_indexer = np.full(unstacker.mask.shape, -1)
|
| 964 |
+
new_indexer[unstacker.mask] = indexer
|
| 965 |
+
allow_fill = True
|
| 966 |
+
# calculating the full mask once and passing it to take_1d is faster
|
| 967 |
+
# than letting take_1d calculate it in each repeated call
|
| 968 |
+
new_mask2D = (~unstacker.mask).reshape(*unstacker.full_shape)
|
| 969 |
+
needs_masking = new_mask2D.any(axis=0)
|
| 970 |
+
new_indexer2D = new_indexer.reshape(*unstacker.full_shape)
|
| 971 |
+
new_indexer2D = ensure_platform_int(new_indexer2D)
|
| 972 |
+
|
| 973 |
+
new_arrays = []
|
| 974 |
+
for arr in self.arrays:
|
| 975 |
+
for i in range(unstacker.full_shape[1]):
|
| 976 |
+
if allow_fill:
|
| 977 |
+
# error: Value of type "Optional[Any]" is not indexable [index]
|
| 978 |
+
new_arr = take_1d(
|
| 979 |
+
arr,
|
| 980 |
+
new_indexer2D[:, i],
|
| 981 |
+
allow_fill=needs_masking[i], # type: ignore[index]
|
| 982 |
+
fill_value=fill_value,
|
| 983 |
+
mask=new_mask2D[:, i], # type: ignore[index]
|
| 984 |
+
)
|
| 985 |
+
else:
|
| 986 |
+
new_arr = take_1d(arr, new_indexer2D[:, i], allow_fill=False)
|
| 987 |
+
new_arrays.append(new_arr)
|
| 988 |
+
|
| 989 |
+
new_index = unstacker.new_index
|
| 990 |
+
new_columns = unstacker.get_new_columns(self._axes[1])
|
| 991 |
+
new_axes = [new_index, new_columns]
|
| 992 |
+
|
| 993 |
+
return type(self)(new_arrays, new_axes, verify_integrity=False)
|
| 994 |
+
|
| 995 |
+
def as_array(
|
| 996 |
+
self,
|
| 997 |
+
dtype=None,
|
| 998 |
+
copy: bool = False,
|
| 999 |
+
na_value: object = lib.no_default,
|
| 1000 |
+
) -> np.ndarray:
|
| 1001 |
+
"""
|
| 1002 |
+
Convert the blockmanager data into an numpy array.
|
| 1003 |
+
|
| 1004 |
+
Parameters
|
| 1005 |
+
----------
|
| 1006 |
+
dtype : object, default None
|
| 1007 |
+
Data type of the return array.
|
| 1008 |
+
copy : bool, default False
|
| 1009 |
+
If True then guarantee that a copy is returned. A value of
|
| 1010 |
+
False does not guarantee that the underlying data is not
|
| 1011 |
+
copied.
|
| 1012 |
+
na_value : object, default lib.no_default
|
| 1013 |
+
Value to be used as the missing value sentinel.
|
| 1014 |
+
|
| 1015 |
+
Returns
|
| 1016 |
+
-------
|
| 1017 |
+
arr : ndarray
|
| 1018 |
+
"""
|
| 1019 |
+
if len(self.arrays) == 0:
|
| 1020 |
+
empty_arr = np.empty(self.shape, dtype=float)
|
| 1021 |
+
return empty_arr.transpose()
|
| 1022 |
+
|
| 1023 |
+
# We want to copy when na_value is provided to avoid
|
| 1024 |
+
# mutating the original object
|
| 1025 |
+
copy = copy or na_value is not lib.no_default
|
| 1026 |
+
|
| 1027 |
+
if not dtype:
|
| 1028 |
+
dtype = interleaved_dtype([arr.dtype for arr in self.arrays])
|
| 1029 |
+
|
| 1030 |
+
dtype = ensure_np_dtype(dtype)
|
| 1031 |
+
|
| 1032 |
+
result = np.empty(self.shape_proper, dtype=dtype)
|
| 1033 |
+
|
| 1034 |
+
for i, arr in enumerate(self.arrays):
|
| 1035 |
+
arr = arr.astype(dtype, copy=copy)
|
| 1036 |
+
result[:, i] = arr
|
| 1037 |
+
|
| 1038 |
+
if na_value is not lib.no_default:
|
| 1039 |
+
result[isna(result)] = na_value
|
| 1040 |
+
|
| 1041 |
+
return result
|
| 1042 |
+
|
| 1043 |
+
@classmethod
|
| 1044 |
+
def concat_horizontal(cls, mgrs: list[Self], axes: list[Index]) -> Self:
|
| 1045 |
+
"""
|
| 1046 |
+
Concatenate uniformly-indexed ArrayManagers horizontally.
|
| 1047 |
+
"""
|
| 1048 |
+
# concatting along the columns -> combine reindexed arrays in a single manager
|
| 1049 |
+
arrays = list(itertools.chain.from_iterable([mgr.arrays for mgr in mgrs]))
|
| 1050 |
+
new_mgr = cls(arrays, [axes[1], axes[0]], verify_integrity=False)
|
| 1051 |
+
return new_mgr
|
| 1052 |
+
|
| 1053 |
+
@classmethod
|
| 1054 |
+
def concat_vertical(cls, mgrs: list[Self], axes: list[Index]) -> Self:
|
| 1055 |
+
"""
|
| 1056 |
+
Concatenate uniformly-indexed ArrayManagers vertically.
|
| 1057 |
+
"""
|
| 1058 |
+
# concatting along the rows -> concat the reindexed arrays
|
| 1059 |
+
# TODO(ArrayManager) doesn't yet preserve the correct dtype
|
| 1060 |
+
arrays = [
|
| 1061 |
+
concat_arrays([mgrs[i].arrays[j] for i in range(len(mgrs))])
|
| 1062 |
+
for j in range(len(mgrs[0].arrays))
|
| 1063 |
+
]
|
| 1064 |
+
new_mgr = cls(arrays, [axes[1], axes[0]], verify_integrity=False)
|
| 1065 |
+
return new_mgr
|
| 1066 |
+
|
| 1067 |
+
|
| 1068 |
+
class SingleArrayManager(BaseArrayManager, SingleDataManager):
|
| 1069 |
+
__slots__ = [
|
| 1070 |
+
"_axes", # private attribute, because 'axes' has different order, see below
|
| 1071 |
+
"arrays",
|
| 1072 |
+
]
|
| 1073 |
+
|
| 1074 |
+
arrays: list[np.ndarray | ExtensionArray]
|
| 1075 |
+
_axes: list[Index]
|
| 1076 |
+
|
| 1077 |
+
@property
|
| 1078 |
+
def ndim(self) -> Literal[1]:
|
| 1079 |
+
return 1
|
| 1080 |
+
|
| 1081 |
+
def __init__(
|
| 1082 |
+
self,
|
| 1083 |
+
arrays: list[np.ndarray | ExtensionArray],
|
| 1084 |
+
axes: list[Index],
|
| 1085 |
+
verify_integrity: bool = True,
|
| 1086 |
+
) -> None:
|
| 1087 |
+
self._axes = axes
|
| 1088 |
+
self.arrays = arrays
|
| 1089 |
+
|
| 1090 |
+
if verify_integrity:
|
| 1091 |
+
assert len(axes) == 1
|
| 1092 |
+
assert len(arrays) == 1
|
| 1093 |
+
self._axes = [ensure_index(ax) for ax in self._axes]
|
| 1094 |
+
arr = arrays[0]
|
| 1095 |
+
arr = maybe_coerce_values(arr)
|
| 1096 |
+
arr = extract_pandas_array(arr, None, 1)[0]
|
| 1097 |
+
self.arrays = [arr]
|
| 1098 |
+
self._verify_integrity()
|
| 1099 |
+
|
| 1100 |
+
def _verify_integrity(self) -> None:
|
| 1101 |
+
(n_rows,) = self.shape
|
| 1102 |
+
assert len(self.arrays) == 1
|
| 1103 |
+
arr = self.arrays[0]
|
| 1104 |
+
assert len(arr) == n_rows
|
| 1105 |
+
if not arr.ndim == 1:
|
| 1106 |
+
raise ValueError(
|
| 1107 |
+
"Passed array should be 1-dimensional, got array with "
|
| 1108 |
+
f"{arr.ndim} dimensions instead."
|
| 1109 |
+
)
|
| 1110 |
+
|
| 1111 |
+
@staticmethod
|
| 1112 |
+
def _normalize_axis(axis):
|
| 1113 |
+
return axis
|
| 1114 |
+
|
| 1115 |
+
def make_empty(self, axes=None) -> Self:
|
| 1116 |
+
"""Return an empty ArrayManager with index/array of length 0"""
|
| 1117 |
+
if axes is None:
|
| 1118 |
+
axes = [Index([], dtype=object)]
|
| 1119 |
+
array: np.ndarray = np.array([], dtype=self.dtype)
|
| 1120 |
+
return type(self)([array], axes)
|
| 1121 |
+
|
| 1122 |
+
@classmethod
|
| 1123 |
+
def from_array(cls, array, index) -> SingleArrayManager:
|
| 1124 |
+
return cls([array], [index])
|
| 1125 |
+
|
| 1126 |
+
# error: Cannot override writeable attribute with read-only property
|
| 1127 |
+
@property
|
| 1128 |
+
def axes(self) -> list[Index]: # type: ignore[override]
|
| 1129 |
+
return self._axes
|
| 1130 |
+
|
| 1131 |
+
@property
|
| 1132 |
+
def index(self) -> Index:
|
| 1133 |
+
return self._axes[0]
|
| 1134 |
+
|
| 1135 |
+
@property
|
| 1136 |
+
def dtype(self):
|
| 1137 |
+
return self.array.dtype
|
| 1138 |
+
|
| 1139 |
+
def external_values(self):
|
| 1140 |
+
"""The array that Series.values returns"""
|
| 1141 |
+
return external_values(self.array)
|
| 1142 |
+
|
| 1143 |
+
def internal_values(self):
|
| 1144 |
+
"""The array that Series._values returns"""
|
| 1145 |
+
return self.array
|
| 1146 |
+
|
| 1147 |
+
def array_values(self):
|
| 1148 |
+
"""The array that Series.array returns"""
|
| 1149 |
+
arr = self.array
|
| 1150 |
+
if isinstance(arr, np.ndarray):
|
| 1151 |
+
arr = NumpyExtensionArray(arr)
|
| 1152 |
+
return arr
|
| 1153 |
+
|
| 1154 |
+
@property
|
| 1155 |
+
def _can_hold_na(self) -> bool:
|
| 1156 |
+
if isinstance(self.array, np.ndarray):
|
| 1157 |
+
return self.array.dtype.kind not in "iub"
|
| 1158 |
+
else:
|
| 1159 |
+
# ExtensionArray
|
| 1160 |
+
return self.array._can_hold_na
|
| 1161 |
+
|
| 1162 |
+
@property
|
| 1163 |
+
def is_single_block(self) -> bool:
|
| 1164 |
+
return True
|
| 1165 |
+
|
| 1166 |
+
def fast_xs(self, loc: int) -> SingleArrayManager:
|
| 1167 |
+
raise NotImplementedError("Use series._values[loc] instead")
|
| 1168 |
+
|
| 1169 |
+
def get_slice(self, slobj: slice, axis: AxisInt = 0) -> SingleArrayManager:
|
| 1170 |
+
if axis >= self.ndim:
|
| 1171 |
+
raise IndexError("Requested axis not found in manager")
|
| 1172 |
+
|
| 1173 |
+
new_array = self.array[slobj]
|
| 1174 |
+
new_index = self.index._getitem_slice(slobj)
|
| 1175 |
+
return type(self)([new_array], [new_index], verify_integrity=False)
|
| 1176 |
+
|
| 1177 |
+
def get_rows_with_mask(self, indexer: npt.NDArray[np.bool_]) -> SingleArrayManager:
|
| 1178 |
+
new_array = self.array[indexer]
|
| 1179 |
+
new_index = self.index[indexer]
|
| 1180 |
+
return type(self)([new_array], [new_index])
|
| 1181 |
+
|
| 1182 |
+
# error: Signature of "apply" incompatible with supertype "BaseArrayManager"
|
| 1183 |
+
def apply(self, func, **kwargs) -> Self: # type: ignore[override]
|
| 1184 |
+
if callable(func):
|
| 1185 |
+
new_array = func(self.array, **kwargs)
|
| 1186 |
+
else:
|
| 1187 |
+
new_array = getattr(self.array, func)(**kwargs)
|
| 1188 |
+
return type(self)([new_array], self._axes)
|
| 1189 |
+
|
| 1190 |
+
def setitem(self, indexer, value, warn: bool = True) -> SingleArrayManager:
|
| 1191 |
+
"""
|
| 1192 |
+
Set values with indexer.
|
| 1193 |
+
|
| 1194 |
+
For SingleArrayManager, this backs s[indexer] = value
|
| 1195 |
+
|
| 1196 |
+
See `setitem_inplace` for a version that works inplace and doesn't
|
| 1197 |
+
return a new Manager.
|
| 1198 |
+
"""
|
| 1199 |
+
if isinstance(indexer, np.ndarray) and indexer.ndim > self.ndim:
|
| 1200 |
+
raise ValueError(f"Cannot set values with ndim > {self.ndim}")
|
| 1201 |
+
return self.apply_with_block("setitem", indexer=indexer, value=value)
|
| 1202 |
+
|
| 1203 |
+
def idelete(self, indexer) -> SingleArrayManager:
|
| 1204 |
+
"""
|
| 1205 |
+
Delete selected locations in-place (new array, same ArrayManager)
|
| 1206 |
+
"""
|
| 1207 |
+
to_keep = np.ones(self.shape[0], dtype=np.bool_)
|
| 1208 |
+
to_keep[indexer] = False
|
| 1209 |
+
|
| 1210 |
+
self.arrays = [self.arrays[0][to_keep]]
|
| 1211 |
+
self._axes = [self._axes[0][to_keep]]
|
| 1212 |
+
return self
|
| 1213 |
+
|
| 1214 |
+
def _get_data_subset(self, predicate: Callable) -> SingleArrayManager:
|
| 1215 |
+
# used in get_numeric_data / get_bool_data
|
| 1216 |
+
if predicate(self.array):
|
| 1217 |
+
return type(self)(self.arrays, self._axes, verify_integrity=False)
|
| 1218 |
+
else:
|
| 1219 |
+
return self.make_empty()
|
| 1220 |
+
|
| 1221 |
+
def set_values(self, values: ArrayLike) -> None:
|
| 1222 |
+
"""
|
| 1223 |
+
Set (replace) the values of the SingleArrayManager in place.
|
| 1224 |
+
|
| 1225 |
+
Use at your own risk! This does not check if the passed values are
|
| 1226 |
+
valid for the current SingleArrayManager (length, dtype, etc).
|
| 1227 |
+
"""
|
| 1228 |
+
self.arrays[0] = values
|
| 1229 |
+
|
| 1230 |
+
def to_2d_mgr(self, columns: Index) -> ArrayManager:
|
| 1231 |
+
"""
|
| 1232 |
+
Manager analogue of Series.to_frame
|
| 1233 |
+
"""
|
| 1234 |
+
arrays = [self.arrays[0]]
|
| 1235 |
+
axes = [self.axes[0], columns]
|
| 1236 |
+
|
| 1237 |
+
return ArrayManager(arrays, axes, verify_integrity=False)
|
| 1238 |
+
|
| 1239 |
+
|
| 1240 |
+
class NullArrayProxy:
|
| 1241 |
+
"""
|
| 1242 |
+
Proxy object for an all-NA array.
|
| 1243 |
+
|
| 1244 |
+
Only stores the length of the array, and not the dtype. The dtype
|
| 1245 |
+
will only be known when actually concatenating (after determining the
|
| 1246 |
+
common dtype, for which this proxy is ignored).
|
| 1247 |
+
Using this object avoids that the internals/concat.py needs to determine
|
| 1248 |
+
the proper dtype and array type.
|
| 1249 |
+
"""
|
| 1250 |
+
|
| 1251 |
+
ndim = 1
|
| 1252 |
+
|
| 1253 |
+
def __init__(self, n: int) -> None:
|
| 1254 |
+
self.n = n
|
| 1255 |
+
|
| 1256 |
+
@property
|
| 1257 |
+
def shape(self) -> tuple[int]:
|
| 1258 |
+
return (self.n,)
|
| 1259 |
+
|
| 1260 |
+
def to_array(self, dtype: DtypeObj) -> ArrayLike:
|
| 1261 |
+
"""
|
| 1262 |
+
Helper function to create the actual all-NA array from the NullArrayProxy
|
| 1263 |
+
object.
|
| 1264 |
+
|
| 1265 |
+
Parameters
|
| 1266 |
+
----------
|
| 1267 |
+
arr : NullArrayProxy
|
| 1268 |
+
dtype : the dtype for the resulting array
|
| 1269 |
+
|
| 1270 |
+
Returns
|
| 1271 |
+
-------
|
| 1272 |
+
np.ndarray or ExtensionArray
|
| 1273 |
+
"""
|
| 1274 |
+
if isinstance(dtype, ExtensionDtype):
|
| 1275 |
+
empty = dtype.construct_array_type()._from_sequence([], dtype=dtype)
|
| 1276 |
+
indexer = -np.ones(self.n, dtype=np.intp)
|
| 1277 |
+
return empty.take(indexer, allow_fill=True)
|
| 1278 |
+
else:
|
| 1279 |
+
# when introducing missing values, int becomes float, bool becomes object
|
| 1280 |
+
dtype = ensure_dtype_can_hold_na(dtype)
|
| 1281 |
+
fill_value = na_value_for_dtype(dtype)
|
| 1282 |
+
arr = np.empty(self.n, dtype=dtype)
|
| 1283 |
+
arr.fill(fill_value)
|
| 1284 |
+
return ensure_wrapped_if_datetimelike(arr)
|
| 1285 |
+
|
| 1286 |
+
|
| 1287 |
+
def concat_arrays(to_concat: list) -> ArrayLike:
|
| 1288 |
+
"""
|
| 1289 |
+
Alternative for concat_compat but specialized for use in the ArrayManager.
|
| 1290 |
+
|
| 1291 |
+
Differences: only deals with 1D arrays (no axis keyword), assumes
|
| 1292 |
+
ensure_wrapped_if_datetimelike and does not skip empty arrays to determine
|
| 1293 |
+
the dtype.
|
| 1294 |
+
In addition ensures that all NullArrayProxies get replaced with actual
|
| 1295 |
+
arrays.
|
| 1296 |
+
|
| 1297 |
+
Parameters
|
| 1298 |
+
----------
|
| 1299 |
+
to_concat : list of arrays
|
| 1300 |
+
|
| 1301 |
+
Returns
|
| 1302 |
+
-------
|
| 1303 |
+
np.ndarray or ExtensionArray
|
| 1304 |
+
"""
|
| 1305 |
+
# ignore the all-NA proxies to determine the resulting dtype
|
| 1306 |
+
to_concat_no_proxy = [x for x in to_concat if not isinstance(x, NullArrayProxy)]
|
| 1307 |
+
|
| 1308 |
+
dtypes = {x.dtype for x in to_concat_no_proxy}
|
| 1309 |
+
single_dtype = len(dtypes) == 1
|
| 1310 |
+
|
| 1311 |
+
if single_dtype:
|
| 1312 |
+
target_dtype = to_concat_no_proxy[0].dtype
|
| 1313 |
+
elif all(lib.is_np_dtype(x, "iub") for x in dtypes):
|
| 1314 |
+
# GH#42092
|
| 1315 |
+
target_dtype = np_find_common_type(*dtypes)
|
| 1316 |
+
else:
|
| 1317 |
+
target_dtype = find_common_type([arr.dtype for arr in to_concat_no_proxy])
|
| 1318 |
+
|
| 1319 |
+
to_concat = [
|
| 1320 |
+
arr.to_array(target_dtype)
|
| 1321 |
+
if isinstance(arr, NullArrayProxy)
|
| 1322 |
+
else astype_array(arr, target_dtype, copy=False)
|
| 1323 |
+
for arr in to_concat
|
| 1324 |
+
]
|
| 1325 |
+
|
| 1326 |
+
if isinstance(to_concat[0], ExtensionArray):
|
| 1327 |
+
cls = type(to_concat[0])
|
| 1328 |
+
return cls._concat_same_type(to_concat)
|
| 1329 |
+
|
| 1330 |
+
result = np.concatenate(to_concat)
|
| 1331 |
+
|
| 1332 |
+
# TODO decide on exact behaviour (we shouldn't do this only for empty result)
|
| 1333 |
+
# see https://github.com/pandas-dev/pandas/issues/39817
|
| 1334 |
+
if len(result) == 0:
|
| 1335 |
+
# all empties -> check for bool to not coerce to float
|
| 1336 |
+
kinds = {obj.dtype.kind for obj in to_concat_no_proxy}
|
| 1337 |
+
if len(kinds) != 1:
|
| 1338 |
+
if "b" in kinds:
|
| 1339 |
+
result = result.astype(object)
|
| 1340 |
+
return result
|
videollama2/lib/python3.10/site-packages/pandas/core/internals/base.py
ADDED
|
@@ -0,0 +1,407 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Base class for the internal managers. Both BlockManager and ArrayManager
|
| 3 |
+
inherit from this class.
|
| 4 |
+
"""
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
from typing import (
|
| 8 |
+
TYPE_CHECKING,
|
| 9 |
+
Any,
|
| 10 |
+
Literal,
|
| 11 |
+
cast,
|
| 12 |
+
final,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
import numpy as np
|
| 16 |
+
|
| 17 |
+
from pandas._config import (
|
| 18 |
+
using_copy_on_write,
|
| 19 |
+
warn_copy_on_write,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
from pandas._libs import (
|
| 23 |
+
algos as libalgos,
|
| 24 |
+
lib,
|
| 25 |
+
)
|
| 26 |
+
from pandas.errors import AbstractMethodError
|
| 27 |
+
from pandas.util._validators import validate_bool_kwarg
|
| 28 |
+
|
| 29 |
+
from pandas.core.dtypes.cast import (
|
| 30 |
+
find_common_type,
|
| 31 |
+
np_can_hold_element,
|
| 32 |
+
)
|
| 33 |
+
from pandas.core.dtypes.dtypes import (
|
| 34 |
+
ExtensionDtype,
|
| 35 |
+
SparseDtype,
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
from pandas.core.base import PandasObject
|
| 39 |
+
from pandas.core.construction import extract_array
|
| 40 |
+
from pandas.core.indexes.api import (
|
| 41 |
+
Index,
|
| 42 |
+
default_index,
|
| 43 |
+
)
|
| 44 |
+
|
| 45 |
+
if TYPE_CHECKING:
|
| 46 |
+
from pandas._typing import (
|
| 47 |
+
ArrayLike,
|
| 48 |
+
AxisInt,
|
| 49 |
+
DtypeObj,
|
| 50 |
+
Self,
|
| 51 |
+
Shape,
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class _AlreadyWarned:
|
| 56 |
+
def __init__(self):
|
| 57 |
+
# This class is used on the manager level to the block level to
|
| 58 |
+
# ensure that we warn only once. The block method can update the
|
| 59 |
+
# warned_already option without returning a value to keep the
|
| 60 |
+
# interface consistent. This is only a temporary solution for
|
| 61 |
+
# CoW warnings.
|
| 62 |
+
self.warned_already = False
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class DataManager(PandasObject):
|
| 66 |
+
# TODO share more methods/attributes
|
| 67 |
+
|
| 68 |
+
axes: list[Index]
|
| 69 |
+
|
| 70 |
+
@property
|
| 71 |
+
def items(self) -> Index:
|
| 72 |
+
raise AbstractMethodError(self)
|
| 73 |
+
|
| 74 |
+
@final
|
| 75 |
+
def __len__(self) -> int:
|
| 76 |
+
return len(self.items)
|
| 77 |
+
|
| 78 |
+
@property
|
| 79 |
+
def ndim(self) -> int:
|
| 80 |
+
return len(self.axes)
|
| 81 |
+
|
| 82 |
+
@property
|
| 83 |
+
def shape(self) -> Shape:
|
| 84 |
+
return tuple(len(ax) for ax in self.axes)
|
| 85 |
+
|
| 86 |
+
@final
|
| 87 |
+
def _validate_set_axis(self, axis: AxisInt, new_labels: Index) -> None:
|
| 88 |
+
# Caller is responsible for ensuring we have an Index object.
|
| 89 |
+
old_len = len(self.axes[axis])
|
| 90 |
+
new_len = len(new_labels)
|
| 91 |
+
|
| 92 |
+
if axis == 1 and len(self.items) == 0:
|
| 93 |
+
# If we are setting the index on a DataFrame with no columns,
|
| 94 |
+
# it is OK to change the length.
|
| 95 |
+
pass
|
| 96 |
+
|
| 97 |
+
elif new_len != old_len:
|
| 98 |
+
raise ValueError(
|
| 99 |
+
f"Length mismatch: Expected axis has {old_len} elements, new "
|
| 100 |
+
f"values have {new_len} elements"
|
| 101 |
+
)
|
| 102 |
+
|
| 103 |
+
def reindex_indexer(
|
| 104 |
+
self,
|
| 105 |
+
new_axis,
|
| 106 |
+
indexer,
|
| 107 |
+
axis: AxisInt,
|
| 108 |
+
fill_value=None,
|
| 109 |
+
allow_dups: bool = False,
|
| 110 |
+
copy: bool = True,
|
| 111 |
+
only_slice: bool = False,
|
| 112 |
+
) -> Self:
|
| 113 |
+
raise AbstractMethodError(self)
|
| 114 |
+
|
| 115 |
+
@final
|
| 116 |
+
def reindex_axis(
|
| 117 |
+
self,
|
| 118 |
+
new_index: Index,
|
| 119 |
+
axis: AxisInt,
|
| 120 |
+
fill_value=None,
|
| 121 |
+
only_slice: bool = False,
|
| 122 |
+
) -> Self:
|
| 123 |
+
"""
|
| 124 |
+
Conform data manager to new index.
|
| 125 |
+
"""
|
| 126 |
+
new_index, indexer = self.axes[axis].reindex(new_index)
|
| 127 |
+
|
| 128 |
+
return self.reindex_indexer(
|
| 129 |
+
new_index,
|
| 130 |
+
indexer,
|
| 131 |
+
axis=axis,
|
| 132 |
+
fill_value=fill_value,
|
| 133 |
+
copy=False,
|
| 134 |
+
only_slice=only_slice,
|
| 135 |
+
)
|
| 136 |
+
|
| 137 |
+
def _equal_values(self, other: Self) -> bool:
|
| 138 |
+
"""
|
| 139 |
+
To be implemented by the subclasses. Only check the column values
|
| 140 |
+
assuming shape and indexes have already been checked.
|
| 141 |
+
"""
|
| 142 |
+
raise AbstractMethodError(self)
|
| 143 |
+
|
| 144 |
+
@final
|
| 145 |
+
def equals(self, other: object) -> bool:
|
| 146 |
+
"""
|
| 147 |
+
Implementation for DataFrame.equals
|
| 148 |
+
"""
|
| 149 |
+
if not isinstance(other, type(self)):
|
| 150 |
+
return False
|
| 151 |
+
|
| 152 |
+
self_axes, other_axes = self.axes, other.axes
|
| 153 |
+
if len(self_axes) != len(other_axes):
|
| 154 |
+
return False
|
| 155 |
+
if not all(ax1.equals(ax2) for ax1, ax2 in zip(self_axes, other_axes)):
|
| 156 |
+
return False
|
| 157 |
+
|
| 158 |
+
return self._equal_values(other)
|
| 159 |
+
|
| 160 |
+
def apply(
|
| 161 |
+
self,
|
| 162 |
+
f,
|
| 163 |
+
align_keys: list[str] | None = None,
|
| 164 |
+
**kwargs,
|
| 165 |
+
) -> Self:
|
| 166 |
+
raise AbstractMethodError(self)
|
| 167 |
+
|
| 168 |
+
def apply_with_block(
|
| 169 |
+
self,
|
| 170 |
+
f,
|
| 171 |
+
align_keys: list[str] | None = None,
|
| 172 |
+
**kwargs,
|
| 173 |
+
) -> Self:
|
| 174 |
+
raise AbstractMethodError(self)
|
| 175 |
+
|
| 176 |
+
@final
|
| 177 |
+
def isna(self, func) -> Self:
|
| 178 |
+
return self.apply("apply", func=func)
|
| 179 |
+
|
| 180 |
+
@final
|
| 181 |
+
def fillna(self, value, limit: int | None, inplace: bool, downcast) -> Self:
|
| 182 |
+
if limit is not None:
|
| 183 |
+
# Do this validation even if we go through one of the no-op paths
|
| 184 |
+
limit = libalgos.validate_limit(None, limit=limit)
|
| 185 |
+
|
| 186 |
+
return self.apply_with_block(
|
| 187 |
+
"fillna",
|
| 188 |
+
value=value,
|
| 189 |
+
limit=limit,
|
| 190 |
+
inplace=inplace,
|
| 191 |
+
downcast=downcast,
|
| 192 |
+
using_cow=using_copy_on_write(),
|
| 193 |
+
already_warned=_AlreadyWarned(),
|
| 194 |
+
)
|
| 195 |
+
|
| 196 |
+
@final
|
| 197 |
+
def where(self, other, cond, align: bool) -> Self:
|
| 198 |
+
if align:
|
| 199 |
+
align_keys = ["other", "cond"]
|
| 200 |
+
else:
|
| 201 |
+
align_keys = ["cond"]
|
| 202 |
+
other = extract_array(other, extract_numpy=True)
|
| 203 |
+
|
| 204 |
+
return self.apply_with_block(
|
| 205 |
+
"where",
|
| 206 |
+
align_keys=align_keys,
|
| 207 |
+
other=other,
|
| 208 |
+
cond=cond,
|
| 209 |
+
using_cow=using_copy_on_write(),
|
| 210 |
+
)
|
| 211 |
+
|
| 212 |
+
@final
|
| 213 |
+
def putmask(self, mask, new, align: bool = True, warn: bool = True) -> Self:
|
| 214 |
+
if align:
|
| 215 |
+
align_keys = ["new", "mask"]
|
| 216 |
+
else:
|
| 217 |
+
align_keys = ["mask"]
|
| 218 |
+
new = extract_array(new, extract_numpy=True)
|
| 219 |
+
|
| 220 |
+
already_warned = None
|
| 221 |
+
if warn_copy_on_write():
|
| 222 |
+
already_warned = _AlreadyWarned()
|
| 223 |
+
if not warn:
|
| 224 |
+
already_warned.warned_already = True
|
| 225 |
+
|
| 226 |
+
return self.apply_with_block(
|
| 227 |
+
"putmask",
|
| 228 |
+
align_keys=align_keys,
|
| 229 |
+
mask=mask,
|
| 230 |
+
new=new,
|
| 231 |
+
using_cow=using_copy_on_write(),
|
| 232 |
+
already_warned=already_warned,
|
| 233 |
+
)
|
| 234 |
+
|
| 235 |
+
@final
|
| 236 |
+
def round(self, decimals: int, using_cow: bool = False) -> Self:
|
| 237 |
+
return self.apply_with_block(
|
| 238 |
+
"round",
|
| 239 |
+
decimals=decimals,
|
| 240 |
+
using_cow=using_cow,
|
| 241 |
+
)
|
| 242 |
+
|
| 243 |
+
@final
|
| 244 |
+
def replace(self, to_replace, value, inplace: bool) -> Self:
|
| 245 |
+
inplace = validate_bool_kwarg(inplace, "inplace")
|
| 246 |
+
# NDFrame.replace ensures the not-is_list_likes here
|
| 247 |
+
assert not lib.is_list_like(to_replace)
|
| 248 |
+
assert not lib.is_list_like(value)
|
| 249 |
+
return self.apply_with_block(
|
| 250 |
+
"replace",
|
| 251 |
+
to_replace=to_replace,
|
| 252 |
+
value=value,
|
| 253 |
+
inplace=inplace,
|
| 254 |
+
using_cow=using_copy_on_write(),
|
| 255 |
+
already_warned=_AlreadyWarned(),
|
| 256 |
+
)
|
| 257 |
+
|
| 258 |
+
@final
|
| 259 |
+
def replace_regex(self, **kwargs) -> Self:
|
| 260 |
+
return self.apply_with_block(
|
| 261 |
+
"_replace_regex",
|
| 262 |
+
**kwargs,
|
| 263 |
+
using_cow=using_copy_on_write(),
|
| 264 |
+
already_warned=_AlreadyWarned(),
|
| 265 |
+
)
|
| 266 |
+
|
| 267 |
+
@final
|
| 268 |
+
def replace_list(
|
| 269 |
+
self,
|
| 270 |
+
src_list: list[Any],
|
| 271 |
+
dest_list: list[Any],
|
| 272 |
+
inplace: bool = False,
|
| 273 |
+
regex: bool = False,
|
| 274 |
+
) -> Self:
|
| 275 |
+
"""do a list replace"""
|
| 276 |
+
inplace = validate_bool_kwarg(inplace, "inplace")
|
| 277 |
+
|
| 278 |
+
bm = self.apply_with_block(
|
| 279 |
+
"replace_list",
|
| 280 |
+
src_list=src_list,
|
| 281 |
+
dest_list=dest_list,
|
| 282 |
+
inplace=inplace,
|
| 283 |
+
regex=regex,
|
| 284 |
+
using_cow=using_copy_on_write(),
|
| 285 |
+
already_warned=_AlreadyWarned(),
|
| 286 |
+
)
|
| 287 |
+
bm._consolidate_inplace()
|
| 288 |
+
return bm
|
| 289 |
+
|
| 290 |
+
def interpolate(self, inplace: bool, **kwargs) -> Self:
|
| 291 |
+
return self.apply_with_block(
|
| 292 |
+
"interpolate",
|
| 293 |
+
inplace=inplace,
|
| 294 |
+
**kwargs,
|
| 295 |
+
using_cow=using_copy_on_write(),
|
| 296 |
+
already_warned=_AlreadyWarned(),
|
| 297 |
+
)
|
| 298 |
+
|
| 299 |
+
def pad_or_backfill(self, inplace: bool, **kwargs) -> Self:
|
| 300 |
+
return self.apply_with_block(
|
| 301 |
+
"pad_or_backfill",
|
| 302 |
+
inplace=inplace,
|
| 303 |
+
**kwargs,
|
| 304 |
+
using_cow=using_copy_on_write(),
|
| 305 |
+
already_warned=_AlreadyWarned(),
|
| 306 |
+
)
|
| 307 |
+
|
| 308 |
+
def shift(self, periods: int, fill_value) -> Self:
|
| 309 |
+
if fill_value is lib.no_default:
|
| 310 |
+
fill_value = None
|
| 311 |
+
|
| 312 |
+
return self.apply_with_block("shift", periods=periods, fill_value=fill_value)
|
| 313 |
+
|
| 314 |
+
# --------------------------------------------------------------------
|
| 315 |
+
# Consolidation: No-ops for all but BlockManager
|
| 316 |
+
|
| 317 |
+
def is_consolidated(self) -> bool:
|
| 318 |
+
return True
|
| 319 |
+
|
| 320 |
+
def consolidate(self) -> Self:
|
| 321 |
+
return self
|
| 322 |
+
|
| 323 |
+
def _consolidate_inplace(self) -> None:
|
| 324 |
+
return
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
class SingleDataManager(DataManager):
|
| 328 |
+
@property
|
| 329 |
+
def ndim(self) -> Literal[1]:
|
| 330 |
+
return 1
|
| 331 |
+
|
| 332 |
+
@final
|
| 333 |
+
@property
|
| 334 |
+
def array(self) -> ArrayLike:
|
| 335 |
+
"""
|
| 336 |
+
Quick access to the backing array of the Block or SingleArrayManager.
|
| 337 |
+
"""
|
| 338 |
+
# error: "SingleDataManager" has no attribute "arrays"; maybe "array"
|
| 339 |
+
return self.arrays[0] # type: ignore[attr-defined]
|
| 340 |
+
|
| 341 |
+
def setitem_inplace(self, indexer, value, warn: bool = True) -> None:
|
| 342 |
+
"""
|
| 343 |
+
Set values with indexer.
|
| 344 |
+
|
| 345 |
+
For Single[Block/Array]Manager, this backs s[indexer] = value
|
| 346 |
+
|
| 347 |
+
This is an inplace version of `setitem()`, mutating the manager/values
|
| 348 |
+
in place, not returning a new Manager (and Block), and thus never changing
|
| 349 |
+
the dtype.
|
| 350 |
+
"""
|
| 351 |
+
arr = self.array
|
| 352 |
+
|
| 353 |
+
# EAs will do this validation in their own __setitem__ methods.
|
| 354 |
+
if isinstance(arr, np.ndarray):
|
| 355 |
+
# Note: checking for ndarray instead of np.dtype means we exclude
|
| 356 |
+
# dt64/td64, which do their own validation.
|
| 357 |
+
value = np_can_hold_element(arr.dtype, value)
|
| 358 |
+
|
| 359 |
+
if isinstance(value, np.ndarray) and value.ndim == 1 and len(value) == 1:
|
| 360 |
+
# NumPy 1.25 deprecation: https://github.com/numpy/numpy/pull/10615
|
| 361 |
+
value = value[0, ...]
|
| 362 |
+
|
| 363 |
+
arr[indexer] = value
|
| 364 |
+
|
| 365 |
+
def grouped_reduce(self, func):
|
| 366 |
+
arr = self.array
|
| 367 |
+
res = func(arr)
|
| 368 |
+
index = default_index(len(res))
|
| 369 |
+
|
| 370 |
+
mgr = type(self).from_array(res, index)
|
| 371 |
+
return mgr
|
| 372 |
+
|
| 373 |
+
@classmethod
|
| 374 |
+
def from_array(cls, arr: ArrayLike, index: Index):
|
| 375 |
+
raise AbstractMethodError(cls)
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
def interleaved_dtype(dtypes: list[DtypeObj]) -> DtypeObj | None:
|
| 379 |
+
"""
|
| 380 |
+
Find the common dtype for `blocks`.
|
| 381 |
+
|
| 382 |
+
Parameters
|
| 383 |
+
----------
|
| 384 |
+
blocks : List[DtypeObj]
|
| 385 |
+
|
| 386 |
+
Returns
|
| 387 |
+
-------
|
| 388 |
+
dtype : np.dtype, ExtensionDtype, or None
|
| 389 |
+
None is returned when `blocks` is empty.
|
| 390 |
+
"""
|
| 391 |
+
if not len(dtypes):
|
| 392 |
+
return None
|
| 393 |
+
|
| 394 |
+
return find_common_type(dtypes)
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
def ensure_np_dtype(dtype: DtypeObj) -> np.dtype:
|
| 398 |
+
# TODO: https://github.com/pandas-dev/pandas/issues/22791
|
| 399 |
+
# Give EAs some input on what happens here. Sparse needs this.
|
| 400 |
+
if isinstance(dtype, SparseDtype):
|
| 401 |
+
dtype = dtype.subtype
|
| 402 |
+
dtype = cast(np.dtype, dtype)
|
| 403 |
+
elif isinstance(dtype, ExtensionDtype):
|
| 404 |
+
dtype = np.dtype("object")
|
| 405 |
+
elif dtype == np.dtype(str):
|
| 406 |
+
dtype = np.dtype("object")
|
| 407 |
+
return dtype
|
videollama2/lib/python3.10/site-packages/pandas/core/internals/blocks.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
videollama2/lib/python3.10/site-packages/pandas/core/internals/managers.py
ADDED
|
@@ -0,0 +1,2375 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from collections.abc import (
|
| 4 |
+
Hashable,
|
| 5 |
+
Sequence,
|
| 6 |
+
)
|
| 7 |
+
import itertools
|
| 8 |
+
from typing import (
|
| 9 |
+
TYPE_CHECKING,
|
| 10 |
+
Callable,
|
| 11 |
+
Literal,
|
| 12 |
+
cast,
|
| 13 |
+
)
|
| 14 |
+
import warnings
|
| 15 |
+
|
| 16 |
+
import numpy as np
|
| 17 |
+
|
| 18 |
+
from pandas._config import (
|
| 19 |
+
using_copy_on_write,
|
| 20 |
+
warn_copy_on_write,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from pandas._libs import (
|
| 24 |
+
internals as libinternals,
|
| 25 |
+
lib,
|
| 26 |
+
)
|
| 27 |
+
from pandas._libs.internals import (
|
| 28 |
+
BlockPlacement,
|
| 29 |
+
BlockValuesRefs,
|
| 30 |
+
)
|
| 31 |
+
from pandas._libs.tslibs import Timestamp
|
| 32 |
+
from pandas.errors import PerformanceWarning
|
| 33 |
+
from pandas.util._decorators import cache_readonly
|
| 34 |
+
from pandas.util._exceptions import find_stack_level
|
| 35 |
+
|
| 36 |
+
from pandas.core.dtypes.cast import infer_dtype_from_scalar
|
| 37 |
+
from pandas.core.dtypes.common import (
|
| 38 |
+
ensure_platform_int,
|
| 39 |
+
is_1d_only_ea_dtype,
|
| 40 |
+
is_list_like,
|
| 41 |
+
)
|
| 42 |
+
from pandas.core.dtypes.dtypes import (
|
| 43 |
+
DatetimeTZDtype,
|
| 44 |
+
ExtensionDtype,
|
| 45 |
+
)
|
| 46 |
+
from pandas.core.dtypes.generic import (
|
| 47 |
+
ABCDataFrame,
|
| 48 |
+
ABCSeries,
|
| 49 |
+
)
|
| 50 |
+
from pandas.core.dtypes.missing import (
|
| 51 |
+
array_equals,
|
| 52 |
+
isna,
|
| 53 |
+
)
|
| 54 |
+
|
| 55 |
+
import pandas.core.algorithms as algos
|
| 56 |
+
from pandas.core.arrays import (
|
| 57 |
+
ArrowExtensionArray,
|
| 58 |
+
ArrowStringArray,
|
| 59 |
+
DatetimeArray,
|
| 60 |
+
)
|
| 61 |
+
from pandas.core.arrays._mixins import NDArrayBackedExtensionArray
|
| 62 |
+
from pandas.core.construction import (
|
| 63 |
+
ensure_wrapped_if_datetimelike,
|
| 64 |
+
extract_array,
|
| 65 |
+
)
|
| 66 |
+
from pandas.core.indexers import maybe_convert_indices
|
| 67 |
+
from pandas.core.indexes.api import (
|
| 68 |
+
Index,
|
| 69 |
+
ensure_index,
|
| 70 |
+
)
|
| 71 |
+
from pandas.core.internals.base import (
|
| 72 |
+
DataManager,
|
| 73 |
+
SingleDataManager,
|
| 74 |
+
ensure_np_dtype,
|
| 75 |
+
interleaved_dtype,
|
| 76 |
+
)
|
| 77 |
+
from pandas.core.internals.blocks import (
|
| 78 |
+
COW_WARNING_GENERAL_MSG,
|
| 79 |
+
COW_WARNING_SETITEM_MSG,
|
| 80 |
+
Block,
|
| 81 |
+
NumpyBlock,
|
| 82 |
+
ensure_block_shape,
|
| 83 |
+
extend_blocks,
|
| 84 |
+
get_block_type,
|
| 85 |
+
maybe_coerce_values,
|
| 86 |
+
new_block,
|
| 87 |
+
new_block_2d,
|
| 88 |
+
)
|
| 89 |
+
from pandas.core.internals.ops import (
|
| 90 |
+
blockwise_all,
|
| 91 |
+
operate_blockwise,
|
| 92 |
+
)
|
| 93 |
+
|
| 94 |
+
if TYPE_CHECKING:
|
| 95 |
+
from pandas._typing import (
|
| 96 |
+
ArrayLike,
|
| 97 |
+
AxisInt,
|
| 98 |
+
DtypeObj,
|
| 99 |
+
QuantileInterpolation,
|
| 100 |
+
Self,
|
| 101 |
+
Shape,
|
| 102 |
+
npt,
|
| 103 |
+
)
|
| 104 |
+
|
| 105 |
+
from pandas.api.extensions import ExtensionArray
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class BaseBlockManager(DataManager):
|
| 109 |
+
"""
|
| 110 |
+
Core internal data structure to implement DataFrame, Series, etc.
|
| 111 |
+
|
| 112 |
+
Manage a bunch of labeled 2D mixed-type ndarrays. Essentially it's a
|
| 113 |
+
lightweight blocked set of labeled data to be manipulated by the DataFrame
|
| 114 |
+
public API class
|
| 115 |
+
|
| 116 |
+
Attributes
|
| 117 |
+
----------
|
| 118 |
+
shape
|
| 119 |
+
ndim
|
| 120 |
+
axes
|
| 121 |
+
values
|
| 122 |
+
items
|
| 123 |
+
|
| 124 |
+
Methods
|
| 125 |
+
-------
|
| 126 |
+
set_axis(axis, new_labels)
|
| 127 |
+
copy(deep=True)
|
| 128 |
+
|
| 129 |
+
get_dtypes
|
| 130 |
+
|
| 131 |
+
apply(func, axes, block_filter_fn)
|
| 132 |
+
|
| 133 |
+
get_bool_data
|
| 134 |
+
get_numeric_data
|
| 135 |
+
|
| 136 |
+
get_slice(slice_like, axis)
|
| 137 |
+
get(label)
|
| 138 |
+
iget(loc)
|
| 139 |
+
|
| 140 |
+
take(indexer, axis)
|
| 141 |
+
reindex_axis(new_labels, axis)
|
| 142 |
+
reindex_indexer(new_labels, indexer, axis)
|
| 143 |
+
|
| 144 |
+
delete(label)
|
| 145 |
+
insert(loc, label, value)
|
| 146 |
+
set(label, value)
|
| 147 |
+
|
| 148 |
+
Parameters
|
| 149 |
+
----------
|
| 150 |
+
blocks: Sequence of Block
|
| 151 |
+
axes: Sequence of Index
|
| 152 |
+
verify_integrity: bool, default True
|
| 153 |
+
|
| 154 |
+
Notes
|
| 155 |
+
-----
|
| 156 |
+
This is *not* a public API class
|
| 157 |
+
"""
|
| 158 |
+
|
| 159 |
+
__slots__ = ()
|
| 160 |
+
|
| 161 |
+
_blknos: npt.NDArray[np.intp]
|
| 162 |
+
_blklocs: npt.NDArray[np.intp]
|
| 163 |
+
blocks: tuple[Block, ...]
|
| 164 |
+
axes: list[Index]
|
| 165 |
+
|
| 166 |
+
@property
|
| 167 |
+
def ndim(self) -> int:
|
| 168 |
+
raise NotImplementedError
|
| 169 |
+
|
| 170 |
+
_known_consolidated: bool
|
| 171 |
+
_is_consolidated: bool
|
| 172 |
+
|
| 173 |
+
def __init__(self, blocks, axes, verify_integrity: bool = True) -> None:
|
| 174 |
+
raise NotImplementedError
|
| 175 |
+
|
| 176 |
+
@classmethod
|
| 177 |
+
def from_blocks(cls, blocks: list[Block], axes: list[Index]) -> Self:
|
| 178 |
+
raise NotImplementedError
|
| 179 |
+
|
| 180 |
+
@property
|
| 181 |
+
def blknos(self) -> npt.NDArray[np.intp]:
|
| 182 |
+
"""
|
| 183 |
+
Suppose we want to find the array corresponding to our i'th column.
|
| 184 |
+
|
| 185 |
+
blknos[i] identifies the block from self.blocks that contains this column.
|
| 186 |
+
|
| 187 |
+
blklocs[i] identifies the column of interest within
|
| 188 |
+
self.blocks[self.blknos[i]]
|
| 189 |
+
"""
|
| 190 |
+
if self._blknos is None:
|
| 191 |
+
# Note: these can be altered by other BlockManager methods.
|
| 192 |
+
self._rebuild_blknos_and_blklocs()
|
| 193 |
+
|
| 194 |
+
return self._blknos
|
| 195 |
+
|
| 196 |
+
@property
|
| 197 |
+
def blklocs(self) -> npt.NDArray[np.intp]:
|
| 198 |
+
"""
|
| 199 |
+
See blknos.__doc__
|
| 200 |
+
"""
|
| 201 |
+
if self._blklocs is None:
|
| 202 |
+
# Note: these can be altered by other BlockManager methods.
|
| 203 |
+
self._rebuild_blknos_and_blklocs()
|
| 204 |
+
|
| 205 |
+
return self._blklocs
|
| 206 |
+
|
| 207 |
+
def make_empty(self, axes=None) -> Self:
|
| 208 |
+
"""return an empty BlockManager with the items axis of len 0"""
|
| 209 |
+
if axes is None:
|
| 210 |
+
axes = [Index([])] + self.axes[1:]
|
| 211 |
+
|
| 212 |
+
# preserve dtype if possible
|
| 213 |
+
if self.ndim == 1:
|
| 214 |
+
assert isinstance(self, SingleBlockManager) # for mypy
|
| 215 |
+
blk = self.blocks[0]
|
| 216 |
+
arr = blk.values[:0]
|
| 217 |
+
bp = BlockPlacement(slice(0, 0))
|
| 218 |
+
nb = blk.make_block_same_class(arr, placement=bp)
|
| 219 |
+
blocks = [nb]
|
| 220 |
+
else:
|
| 221 |
+
blocks = []
|
| 222 |
+
return type(self).from_blocks(blocks, axes)
|
| 223 |
+
|
| 224 |
+
def __nonzero__(self) -> bool:
|
| 225 |
+
return True
|
| 226 |
+
|
| 227 |
+
# Python3 compat
|
| 228 |
+
__bool__ = __nonzero__
|
| 229 |
+
|
| 230 |
+
def _normalize_axis(self, axis: AxisInt) -> int:
|
| 231 |
+
# switch axis to follow BlockManager logic
|
| 232 |
+
if self.ndim == 2:
|
| 233 |
+
axis = 1 if axis == 0 else 0
|
| 234 |
+
return axis
|
| 235 |
+
|
| 236 |
+
def set_axis(self, axis: AxisInt, new_labels: Index) -> None:
|
| 237 |
+
# Caller is responsible for ensuring we have an Index object.
|
| 238 |
+
self._validate_set_axis(axis, new_labels)
|
| 239 |
+
self.axes[axis] = new_labels
|
| 240 |
+
|
| 241 |
+
@property
|
| 242 |
+
def is_single_block(self) -> bool:
|
| 243 |
+
# Assumes we are 2D; overridden by SingleBlockManager
|
| 244 |
+
return len(self.blocks) == 1
|
| 245 |
+
|
| 246 |
+
@property
|
| 247 |
+
def items(self) -> Index:
|
| 248 |
+
return self.axes[0]
|
| 249 |
+
|
| 250 |
+
def _has_no_reference(self, i: int) -> bool:
|
| 251 |
+
"""
|
| 252 |
+
Check for column `i` if it has references.
|
| 253 |
+
(whether it references another array or is itself being referenced)
|
| 254 |
+
Returns True if the column has no references.
|
| 255 |
+
"""
|
| 256 |
+
blkno = self.blknos[i]
|
| 257 |
+
return self._has_no_reference_block(blkno)
|
| 258 |
+
|
| 259 |
+
def _has_no_reference_block(self, blkno: int) -> bool:
|
| 260 |
+
"""
|
| 261 |
+
Check for block `i` if it has references.
|
| 262 |
+
(whether it references another array or is itself being referenced)
|
| 263 |
+
Returns True if the block has no references.
|
| 264 |
+
"""
|
| 265 |
+
return not self.blocks[blkno].refs.has_reference()
|
| 266 |
+
|
| 267 |
+
def add_references(self, mgr: BaseBlockManager) -> None:
|
| 268 |
+
"""
|
| 269 |
+
Adds the references from one manager to another. We assume that both
|
| 270 |
+
managers have the same block structure.
|
| 271 |
+
"""
|
| 272 |
+
if len(self.blocks) != len(mgr.blocks):
|
| 273 |
+
# If block structure changes, then we made a copy
|
| 274 |
+
return
|
| 275 |
+
for i, blk in enumerate(self.blocks):
|
| 276 |
+
blk.refs = mgr.blocks[i].refs
|
| 277 |
+
blk.refs.add_reference(blk)
|
| 278 |
+
|
| 279 |
+
def references_same_values(self, mgr: BaseBlockManager, blkno: int) -> bool:
|
| 280 |
+
"""
|
| 281 |
+
Checks if two blocks from two different block managers reference the
|
| 282 |
+
same underlying values.
|
| 283 |
+
"""
|
| 284 |
+
blk = self.blocks[blkno]
|
| 285 |
+
return any(blk is ref() for ref in mgr.blocks[blkno].refs.referenced_blocks)
|
| 286 |
+
|
| 287 |
+
def get_dtypes(self) -> npt.NDArray[np.object_]:
|
| 288 |
+
dtypes = np.array([blk.dtype for blk in self.blocks], dtype=object)
|
| 289 |
+
return dtypes.take(self.blknos)
|
| 290 |
+
|
| 291 |
+
@property
|
| 292 |
+
def arrays(self) -> list[ArrayLike]:
|
| 293 |
+
"""
|
| 294 |
+
Quick access to the backing arrays of the Blocks.
|
| 295 |
+
|
| 296 |
+
Only for compatibility with ArrayManager for testing convenience.
|
| 297 |
+
Not to be used in actual code, and return value is not the same as the
|
| 298 |
+
ArrayManager method (list of 1D arrays vs iterator of 2D ndarrays / 1D EAs).
|
| 299 |
+
|
| 300 |
+
Warning! The returned arrays don't handle Copy-on-Write, so this should
|
| 301 |
+
be used with caution (only in read-mode).
|
| 302 |
+
"""
|
| 303 |
+
return [blk.values for blk in self.blocks]
|
| 304 |
+
|
| 305 |
+
def __repr__(self) -> str:
|
| 306 |
+
output = type(self).__name__
|
| 307 |
+
for i, ax in enumerate(self.axes):
|
| 308 |
+
if i == 0:
|
| 309 |
+
output += f"\nItems: {ax}"
|
| 310 |
+
else:
|
| 311 |
+
output += f"\nAxis {i}: {ax}"
|
| 312 |
+
|
| 313 |
+
for block in self.blocks:
|
| 314 |
+
output += f"\n{block}"
|
| 315 |
+
return output
|
| 316 |
+
|
| 317 |
+
def apply(
|
| 318 |
+
self,
|
| 319 |
+
f,
|
| 320 |
+
align_keys: list[str] | None = None,
|
| 321 |
+
**kwargs,
|
| 322 |
+
) -> Self:
|
| 323 |
+
"""
|
| 324 |
+
Iterate over the blocks, collect and create a new BlockManager.
|
| 325 |
+
|
| 326 |
+
Parameters
|
| 327 |
+
----------
|
| 328 |
+
f : str or callable
|
| 329 |
+
Name of the Block method to apply.
|
| 330 |
+
align_keys: List[str] or None, default None
|
| 331 |
+
**kwargs
|
| 332 |
+
Keywords to pass to `f`
|
| 333 |
+
|
| 334 |
+
Returns
|
| 335 |
+
-------
|
| 336 |
+
BlockManager
|
| 337 |
+
"""
|
| 338 |
+
assert "filter" not in kwargs
|
| 339 |
+
|
| 340 |
+
align_keys = align_keys or []
|
| 341 |
+
result_blocks: list[Block] = []
|
| 342 |
+
# fillna: Series/DataFrame is responsible for making sure value is aligned
|
| 343 |
+
|
| 344 |
+
aligned_args = {k: kwargs[k] for k in align_keys}
|
| 345 |
+
|
| 346 |
+
for b in self.blocks:
|
| 347 |
+
if aligned_args:
|
| 348 |
+
for k, obj in aligned_args.items():
|
| 349 |
+
if isinstance(obj, (ABCSeries, ABCDataFrame)):
|
| 350 |
+
# The caller is responsible for ensuring that
|
| 351 |
+
# obj.axes[-1].equals(self.items)
|
| 352 |
+
if obj.ndim == 1:
|
| 353 |
+
kwargs[k] = obj.iloc[b.mgr_locs.indexer]._values
|
| 354 |
+
else:
|
| 355 |
+
kwargs[k] = obj.iloc[:, b.mgr_locs.indexer]._values
|
| 356 |
+
else:
|
| 357 |
+
# otherwise we have an ndarray
|
| 358 |
+
kwargs[k] = obj[b.mgr_locs.indexer]
|
| 359 |
+
|
| 360 |
+
if callable(f):
|
| 361 |
+
applied = b.apply(f, **kwargs)
|
| 362 |
+
else:
|
| 363 |
+
applied = getattr(b, f)(**kwargs)
|
| 364 |
+
result_blocks = extend_blocks(applied, result_blocks)
|
| 365 |
+
|
| 366 |
+
out = type(self).from_blocks(result_blocks, self.axes)
|
| 367 |
+
return out
|
| 368 |
+
|
| 369 |
+
# Alias so we can share code with ArrayManager
|
| 370 |
+
apply_with_block = apply
|
| 371 |
+
|
| 372 |
+
def setitem(self, indexer, value, warn: bool = True) -> Self:
|
| 373 |
+
"""
|
| 374 |
+
Set values with indexer.
|
| 375 |
+
|
| 376 |
+
For SingleBlockManager, this backs s[indexer] = value
|
| 377 |
+
"""
|
| 378 |
+
if isinstance(indexer, np.ndarray) and indexer.ndim > self.ndim:
|
| 379 |
+
raise ValueError(f"Cannot set values with ndim > {self.ndim}")
|
| 380 |
+
|
| 381 |
+
if warn and warn_copy_on_write() and not self._has_no_reference(0):
|
| 382 |
+
warnings.warn(
|
| 383 |
+
COW_WARNING_GENERAL_MSG,
|
| 384 |
+
FutureWarning,
|
| 385 |
+
stacklevel=find_stack_level(),
|
| 386 |
+
)
|
| 387 |
+
|
| 388 |
+
elif using_copy_on_write() and not self._has_no_reference(0):
|
| 389 |
+
# this method is only called if there is a single block -> hardcoded 0
|
| 390 |
+
# Split blocks to only copy the columns we want to modify
|
| 391 |
+
if self.ndim == 2 and isinstance(indexer, tuple):
|
| 392 |
+
blk_loc = self.blklocs[indexer[1]]
|
| 393 |
+
if is_list_like(blk_loc) and blk_loc.ndim == 2:
|
| 394 |
+
blk_loc = np.squeeze(blk_loc, axis=0)
|
| 395 |
+
elif not is_list_like(blk_loc):
|
| 396 |
+
# Keep dimension and copy data later
|
| 397 |
+
blk_loc = [blk_loc] # type: ignore[assignment]
|
| 398 |
+
if len(blk_loc) == 0:
|
| 399 |
+
return self.copy(deep=False)
|
| 400 |
+
|
| 401 |
+
values = self.blocks[0].values
|
| 402 |
+
if values.ndim == 2:
|
| 403 |
+
values = values[blk_loc]
|
| 404 |
+
# "T" has no attribute "_iset_split_block"
|
| 405 |
+
self._iset_split_block( # type: ignore[attr-defined]
|
| 406 |
+
0, blk_loc, values
|
| 407 |
+
)
|
| 408 |
+
# first block equals values
|
| 409 |
+
self.blocks[0].setitem((indexer[0], np.arange(len(blk_loc))), value)
|
| 410 |
+
return self
|
| 411 |
+
# No need to split if we either set all columns or on a single block
|
| 412 |
+
# manager
|
| 413 |
+
self = self.copy()
|
| 414 |
+
|
| 415 |
+
return self.apply("setitem", indexer=indexer, value=value)
|
| 416 |
+
|
| 417 |
+
def diff(self, n: int) -> Self:
|
| 418 |
+
# only reached with self.ndim == 2
|
| 419 |
+
return self.apply("diff", n=n)
|
| 420 |
+
|
| 421 |
+
def astype(self, dtype, copy: bool | None = False, errors: str = "raise") -> Self:
|
| 422 |
+
if copy is None:
|
| 423 |
+
if using_copy_on_write():
|
| 424 |
+
copy = False
|
| 425 |
+
else:
|
| 426 |
+
copy = True
|
| 427 |
+
elif using_copy_on_write():
|
| 428 |
+
copy = False
|
| 429 |
+
|
| 430 |
+
return self.apply(
|
| 431 |
+
"astype",
|
| 432 |
+
dtype=dtype,
|
| 433 |
+
copy=copy,
|
| 434 |
+
errors=errors,
|
| 435 |
+
using_cow=using_copy_on_write(),
|
| 436 |
+
)
|
| 437 |
+
|
| 438 |
+
def convert(self, copy: bool | None) -> Self:
|
| 439 |
+
if copy is None:
|
| 440 |
+
if using_copy_on_write():
|
| 441 |
+
copy = False
|
| 442 |
+
else:
|
| 443 |
+
copy = True
|
| 444 |
+
elif using_copy_on_write():
|
| 445 |
+
copy = False
|
| 446 |
+
|
| 447 |
+
return self.apply("convert", copy=copy, using_cow=using_copy_on_write())
|
| 448 |
+
|
| 449 |
+
def convert_dtypes(self, **kwargs):
|
| 450 |
+
if using_copy_on_write():
|
| 451 |
+
copy = False
|
| 452 |
+
else:
|
| 453 |
+
copy = True
|
| 454 |
+
|
| 455 |
+
return self.apply(
|
| 456 |
+
"convert_dtypes", copy=copy, using_cow=using_copy_on_write(), **kwargs
|
| 457 |
+
)
|
| 458 |
+
|
| 459 |
+
def get_values_for_csv(
|
| 460 |
+
self, *, float_format, date_format, decimal, na_rep: str = "nan", quoting=None
|
| 461 |
+
) -> Self:
|
| 462 |
+
"""
|
| 463 |
+
Convert values to native types (strings / python objects) that are used
|
| 464 |
+
in formatting (repr / csv).
|
| 465 |
+
"""
|
| 466 |
+
return self.apply(
|
| 467 |
+
"get_values_for_csv",
|
| 468 |
+
na_rep=na_rep,
|
| 469 |
+
quoting=quoting,
|
| 470 |
+
float_format=float_format,
|
| 471 |
+
date_format=date_format,
|
| 472 |
+
decimal=decimal,
|
| 473 |
+
)
|
| 474 |
+
|
| 475 |
+
@property
|
| 476 |
+
def any_extension_types(self) -> bool:
|
| 477 |
+
"""Whether any of the blocks in this manager are extension blocks"""
|
| 478 |
+
return any(block.is_extension for block in self.blocks)
|
| 479 |
+
|
| 480 |
+
@property
|
| 481 |
+
def is_view(self) -> bool:
|
| 482 |
+
"""return a boolean if we are a single block and are a view"""
|
| 483 |
+
if len(self.blocks) == 1:
|
| 484 |
+
return self.blocks[0].is_view
|
| 485 |
+
|
| 486 |
+
# It is technically possible to figure out which blocks are views
|
| 487 |
+
# e.g. [ b.values.base is not None for b in self.blocks ]
|
| 488 |
+
# but then we have the case of possibly some blocks being a view
|
| 489 |
+
# and some blocks not. setting in theory is possible on the non-view
|
| 490 |
+
# blocks w/o causing a SettingWithCopy raise/warn. But this is a bit
|
| 491 |
+
# complicated
|
| 492 |
+
|
| 493 |
+
return False
|
| 494 |
+
|
| 495 |
+
def _get_data_subset(self, predicate: Callable) -> Self:
|
| 496 |
+
blocks = [blk for blk in self.blocks if predicate(blk.values)]
|
| 497 |
+
return self._combine(blocks)
|
| 498 |
+
|
| 499 |
+
def get_bool_data(self) -> Self:
|
| 500 |
+
"""
|
| 501 |
+
Select blocks that are bool-dtype and columns from object-dtype blocks
|
| 502 |
+
that are all-bool.
|
| 503 |
+
"""
|
| 504 |
+
|
| 505 |
+
new_blocks = []
|
| 506 |
+
|
| 507 |
+
for blk in self.blocks:
|
| 508 |
+
if blk.dtype == bool:
|
| 509 |
+
new_blocks.append(blk)
|
| 510 |
+
|
| 511 |
+
elif blk.is_object:
|
| 512 |
+
nbs = blk._split()
|
| 513 |
+
new_blocks.extend(nb for nb in nbs if nb.is_bool)
|
| 514 |
+
|
| 515 |
+
return self._combine(new_blocks)
|
| 516 |
+
|
| 517 |
+
def get_numeric_data(self) -> Self:
|
| 518 |
+
numeric_blocks = [blk for blk in self.blocks if blk.is_numeric]
|
| 519 |
+
if len(numeric_blocks) == len(self.blocks):
|
| 520 |
+
# Avoid somewhat expensive _combine
|
| 521 |
+
return self
|
| 522 |
+
return self._combine(numeric_blocks)
|
| 523 |
+
|
| 524 |
+
def _combine(self, blocks: list[Block], index: Index | None = None) -> Self:
|
| 525 |
+
"""return a new manager with the blocks"""
|
| 526 |
+
if len(blocks) == 0:
|
| 527 |
+
if self.ndim == 2:
|
| 528 |
+
# retain our own Index dtype
|
| 529 |
+
if index is not None:
|
| 530 |
+
axes = [self.items[:0], index]
|
| 531 |
+
else:
|
| 532 |
+
axes = [self.items[:0]] + self.axes[1:]
|
| 533 |
+
return self.make_empty(axes)
|
| 534 |
+
return self.make_empty()
|
| 535 |
+
|
| 536 |
+
# FIXME: optimization potential
|
| 537 |
+
indexer = np.sort(np.concatenate([b.mgr_locs.as_array for b in blocks]))
|
| 538 |
+
inv_indexer = lib.get_reverse_indexer(indexer, self.shape[0])
|
| 539 |
+
|
| 540 |
+
new_blocks: list[Block] = []
|
| 541 |
+
for b in blocks:
|
| 542 |
+
nb = b.copy(deep=False)
|
| 543 |
+
nb.mgr_locs = BlockPlacement(inv_indexer[nb.mgr_locs.indexer])
|
| 544 |
+
new_blocks.append(nb)
|
| 545 |
+
|
| 546 |
+
axes = list(self.axes)
|
| 547 |
+
if index is not None:
|
| 548 |
+
axes[-1] = index
|
| 549 |
+
axes[0] = self.items.take(indexer)
|
| 550 |
+
|
| 551 |
+
return type(self).from_blocks(new_blocks, axes)
|
| 552 |
+
|
| 553 |
+
@property
|
| 554 |
+
def nblocks(self) -> int:
|
| 555 |
+
return len(self.blocks)
|
| 556 |
+
|
| 557 |
+
def copy(self, deep: bool | None | Literal["all"] = True) -> Self:
|
| 558 |
+
"""
|
| 559 |
+
Make deep or shallow copy of BlockManager
|
| 560 |
+
|
| 561 |
+
Parameters
|
| 562 |
+
----------
|
| 563 |
+
deep : bool, string or None, default True
|
| 564 |
+
If False or None, return a shallow copy (do not copy data)
|
| 565 |
+
If 'all', copy data and a deep copy of the index
|
| 566 |
+
|
| 567 |
+
Returns
|
| 568 |
+
-------
|
| 569 |
+
BlockManager
|
| 570 |
+
"""
|
| 571 |
+
if deep is None:
|
| 572 |
+
if using_copy_on_write():
|
| 573 |
+
# use shallow copy
|
| 574 |
+
deep = False
|
| 575 |
+
else:
|
| 576 |
+
# preserve deep copy for BlockManager with copy=None
|
| 577 |
+
deep = True
|
| 578 |
+
|
| 579 |
+
# this preserves the notion of view copying of axes
|
| 580 |
+
if deep:
|
| 581 |
+
# hit in e.g. tests.io.json.test_pandas
|
| 582 |
+
|
| 583 |
+
def copy_func(ax):
|
| 584 |
+
return ax.copy(deep=True) if deep == "all" else ax.view()
|
| 585 |
+
|
| 586 |
+
new_axes = [copy_func(ax) for ax in self.axes]
|
| 587 |
+
else:
|
| 588 |
+
if using_copy_on_write():
|
| 589 |
+
new_axes = [ax.view() for ax in self.axes]
|
| 590 |
+
else:
|
| 591 |
+
new_axes = list(self.axes)
|
| 592 |
+
|
| 593 |
+
res = self.apply("copy", deep=deep)
|
| 594 |
+
res.axes = new_axes
|
| 595 |
+
|
| 596 |
+
if self.ndim > 1:
|
| 597 |
+
# Avoid needing to re-compute these
|
| 598 |
+
blknos = self._blknos
|
| 599 |
+
if blknos is not None:
|
| 600 |
+
res._blknos = blknos.copy()
|
| 601 |
+
res._blklocs = self._blklocs.copy()
|
| 602 |
+
|
| 603 |
+
if deep:
|
| 604 |
+
res._consolidate_inplace()
|
| 605 |
+
return res
|
| 606 |
+
|
| 607 |
+
def consolidate(self) -> Self:
|
| 608 |
+
"""
|
| 609 |
+
Join together blocks having same dtype
|
| 610 |
+
|
| 611 |
+
Returns
|
| 612 |
+
-------
|
| 613 |
+
y : BlockManager
|
| 614 |
+
"""
|
| 615 |
+
if self.is_consolidated():
|
| 616 |
+
return self
|
| 617 |
+
|
| 618 |
+
bm = type(self)(self.blocks, self.axes, verify_integrity=False)
|
| 619 |
+
bm._is_consolidated = False
|
| 620 |
+
bm._consolidate_inplace()
|
| 621 |
+
return bm
|
| 622 |
+
|
| 623 |
+
def reindex_indexer(
|
| 624 |
+
self,
|
| 625 |
+
new_axis: Index,
|
| 626 |
+
indexer: npt.NDArray[np.intp] | None,
|
| 627 |
+
axis: AxisInt,
|
| 628 |
+
fill_value=None,
|
| 629 |
+
allow_dups: bool = False,
|
| 630 |
+
copy: bool | None = True,
|
| 631 |
+
only_slice: bool = False,
|
| 632 |
+
*,
|
| 633 |
+
use_na_proxy: bool = False,
|
| 634 |
+
) -> Self:
|
| 635 |
+
"""
|
| 636 |
+
Parameters
|
| 637 |
+
----------
|
| 638 |
+
new_axis : Index
|
| 639 |
+
indexer : ndarray[intp] or None
|
| 640 |
+
axis : int
|
| 641 |
+
fill_value : object, default None
|
| 642 |
+
allow_dups : bool, default False
|
| 643 |
+
copy : bool or None, default True
|
| 644 |
+
If None, regard as False to get shallow copy.
|
| 645 |
+
only_slice : bool, default False
|
| 646 |
+
Whether to take views, not copies, along columns.
|
| 647 |
+
use_na_proxy : bool, default False
|
| 648 |
+
Whether to use a np.void ndarray for newly introduced columns.
|
| 649 |
+
|
| 650 |
+
pandas-indexer with -1's only.
|
| 651 |
+
"""
|
| 652 |
+
if copy is None:
|
| 653 |
+
if using_copy_on_write():
|
| 654 |
+
# use shallow copy
|
| 655 |
+
copy = False
|
| 656 |
+
else:
|
| 657 |
+
# preserve deep copy for BlockManager with copy=None
|
| 658 |
+
copy = True
|
| 659 |
+
|
| 660 |
+
if indexer is None:
|
| 661 |
+
if new_axis is self.axes[axis] and not copy:
|
| 662 |
+
return self
|
| 663 |
+
|
| 664 |
+
result = self.copy(deep=copy)
|
| 665 |
+
result.axes = list(self.axes)
|
| 666 |
+
result.axes[axis] = new_axis
|
| 667 |
+
return result
|
| 668 |
+
|
| 669 |
+
# Should be intp, but in some cases we get int64 on 32bit builds
|
| 670 |
+
assert isinstance(indexer, np.ndarray)
|
| 671 |
+
|
| 672 |
+
# some axes don't allow reindexing with dups
|
| 673 |
+
if not allow_dups:
|
| 674 |
+
self.axes[axis]._validate_can_reindex(indexer)
|
| 675 |
+
|
| 676 |
+
if axis >= self.ndim:
|
| 677 |
+
raise IndexError("Requested axis not found in manager")
|
| 678 |
+
|
| 679 |
+
if axis == 0:
|
| 680 |
+
new_blocks = self._slice_take_blocks_ax0(
|
| 681 |
+
indexer,
|
| 682 |
+
fill_value=fill_value,
|
| 683 |
+
only_slice=only_slice,
|
| 684 |
+
use_na_proxy=use_na_proxy,
|
| 685 |
+
)
|
| 686 |
+
else:
|
| 687 |
+
new_blocks = [
|
| 688 |
+
blk.take_nd(
|
| 689 |
+
indexer,
|
| 690 |
+
axis=1,
|
| 691 |
+
fill_value=(
|
| 692 |
+
fill_value if fill_value is not None else blk.fill_value
|
| 693 |
+
),
|
| 694 |
+
)
|
| 695 |
+
for blk in self.blocks
|
| 696 |
+
]
|
| 697 |
+
|
| 698 |
+
new_axes = list(self.axes)
|
| 699 |
+
new_axes[axis] = new_axis
|
| 700 |
+
|
| 701 |
+
new_mgr = type(self).from_blocks(new_blocks, new_axes)
|
| 702 |
+
if axis == 1:
|
| 703 |
+
# We can avoid the need to rebuild these
|
| 704 |
+
new_mgr._blknos = self.blknos.copy()
|
| 705 |
+
new_mgr._blklocs = self.blklocs.copy()
|
| 706 |
+
return new_mgr
|
| 707 |
+
|
| 708 |
+
    def _slice_take_blocks_ax0(
        self,
        slice_or_indexer: slice | np.ndarray,
        fill_value=lib.no_default,
        only_slice: bool = False,
        *,
        use_na_proxy: bool = False,
        ref_inplace_op: bool = False,
    ) -> list[Block]:
        """
        Slice/take blocks along axis=0.

        Overloaded for SingleBlock

        Parameters
        ----------
        slice_or_indexer : slice or np.ndarray[int64]
        fill_value : scalar, default lib.no_default
        only_slice : bool, default False
            If True, we always return views on existing arrays, never copies.
            This is used when called from ops.blockwise.operate_blockwise.
        use_na_proxy : bool, default False
            Whether to use a np.void ndarray for newly introduced columns.
        ref_inplace_op : bool, default False
            Don't track refs if True because we operate inplace

        Returns
        -------
        new_blocks : list of Block
        """
        # Filling (introducing all-NA columns for -1 entries) is only
        # permitted when the caller supplied an explicit fill_value.
        allow_fill = fill_value is not lib.no_default

        sl_type, slobj, sllen = _preprocess_slice_or_indexer(
            slice_or_indexer, self.shape[0], allow_fill=allow_fill
        )

        if self.is_single_block:
            # Fastpaths: a single block can often be sliced/taken directly.
            blk = self.blocks[0]

            if sl_type == "slice":
                # GH#32959 EABlock would fail since we can't make 0-width
                # TODO(EA2D): special casing unnecessary with 2D EAs
                if sllen == 0:
                    return []
                bp = BlockPlacement(slice(0, sllen))
                return [blk.getitem_block_columns(slobj, new_mgr_locs=bp)]
            elif not allow_fill or self.ndim == 1:
                if allow_fill and fill_value is None:
                    fill_value = blk.fill_value

                if not allow_fill and only_slice:
                    # GH#33597 slice instead of take, so we get
                    #  views instead of copies
                    blocks = [
                        blk.getitem_block_columns(
                            slice(ml, ml + 1),
                            new_mgr_locs=BlockPlacement(i),
                            ref_inplace_op=ref_inplace_op,
                        )
                        for i, ml in enumerate(slobj)
                    ]
                    return blocks
                else:
                    bp = BlockPlacement(slice(0, sllen))
                    return [
                        blk.take_nd(
                            slobj,
                            axis=0,
                            new_mgr_locs=bp,
                            fill_value=fill_value,
                        )
                    ]

        # General case: map requested row positions to (block number,
        # location-within-block) pairs. -1 marks positions to be filled.
        if sl_type == "slice":
            blknos = self.blknos[slobj]
            blklocs = self.blklocs[slobj]
        else:
            blknos = algos.take_nd(
                self.blknos, slobj, fill_value=-1, allow_fill=allow_fill
            )
            blklocs = algos.take_nd(
                self.blklocs, slobj, fill_value=-1, allow_fill=allow_fill
            )

        # When filling blknos, make sure blknos is updated before appending to
        # blocks list, that way new blkno is exactly len(blocks).
        blocks = []
        group = not only_slice
        for blkno, mgr_locs in libinternals.get_blkno_placements(blknos, group=group):
            if blkno == -1:
                # If we've got here, fill_value was not lib.no_default

                blocks.append(
                    self._make_na_block(
                        placement=mgr_locs,
                        fill_value=fill_value,
                        use_na_proxy=use_na_proxy,
                    )
                )
            else:
                blk = self.blocks[blkno]

                # Otherwise, slicing along items axis is necessary.
                if not blk._can_consolidate and not blk._validate_ndim:
                    # i.e. we dont go through here for DatetimeTZBlock
                    # A non-consolidatable block, it's easy, because there's
                    # only one item and each mgr loc is a copy of that single
                    # item.
                    deep = not (only_slice or using_copy_on_write())
                    for mgr_loc in mgr_locs:
                        newblk = blk.copy(deep=deep)
                        newblk.mgr_locs = BlockPlacement(slice(mgr_loc, mgr_loc + 1))
                        blocks.append(newblk)

                else:
                    # GH#32779 to avoid the performance penalty of copying,
                    # we may try to only slice
                    taker = blklocs[mgr_locs.indexer]
                    max_len = max(len(mgr_locs), taker.max() + 1)
                    if only_slice or using_copy_on_write():
                        # If the positions are contiguous, a slice lets us
                        # return a view instead of copying.
                        taker = lib.maybe_indices_to_slice(taker, max_len)

                    if isinstance(taker, slice):
                        nb = blk.getitem_block_columns(taker, new_mgr_locs=mgr_locs)
                        blocks.append(nb)
                    elif only_slice:
                        # GH#33597 slice instead of take, so we get
                        # views instead of copies
                        for i, ml in zip(taker, mgr_locs):
                            slc = slice(i, i + 1)
                            bp = BlockPlacement(ml)
                            nb = blk.getitem_block_columns(slc, new_mgr_locs=bp)
                            # We have np.shares_memory(nb.values, blk.values)
                            blocks.append(nb)
                    else:
                        nb = blk.take_nd(taker, axis=0, new_mgr_locs=mgr_locs)
                        blocks.append(nb)

        return blocks
def _make_na_block(
|
| 849 |
+
self, placement: BlockPlacement, fill_value=None, use_na_proxy: bool = False
|
| 850 |
+
) -> Block:
|
| 851 |
+
# Note: we only get here with self.ndim == 2
|
| 852 |
+
|
| 853 |
+
if use_na_proxy:
|
| 854 |
+
assert fill_value is None
|
| 855 |
+
shape = (len(placement), self.shape[1])
|
| 856 |
+
vals = np.empty(shape, dtype=np.void)
|
| 857 |
+
nb = NumpyBlock(vals, placement, ndim=2)
|
| 858 |
+
return nb
|
| 859 |
+
|
| 860 |
+
if fill_value is None:
|
| 861 |
+
fill_value = np.nan
|
| 862 |
+
|
| 863 |
+
shape = (len(placement), self.shape[1])
|
| 864 |
+
|
| 865 |
+
dtype, fill_value = infer_dtype_from_scalar(fill_value)
|
| 866 |
+
block_values = make_na_array(dtype, shape, fill_value)
|
| 867 |
+
return new_block_2d(block_values, placement=placement)
|
| 868 |
+
|
| 869 |
+
def take(
|
| 870 |
+
self,
|
| 871 |
+
indexer: npt.NDArray[np.intp],
|
| 872 |
+
axis: AxisInt = 1,
|
| 873 |
+
verify: bool = True,
|
| 874 |
+
) -> Self:
|
| 875 |
+
"""
|
| 876 |
+
Take items along any axis.
|
| 877 |
+
|
| 878 |
+
indexer : np.ndarray[np.intp]
|
| 879 |
+
axis : int, default 1
|
| 880 |
+
verify : bool, default True
|
| 881 |
+
Check that all entries are between 0 and len(self) - 1, inclusive.
|
| 882 |
+
Pass verify=False if this check has been done by the caller.
|
| 883 |
+
|
| 884 |
+
Returns
|
| 885 |
+
-------
|
| 886 |
+
BlockManager
|
| 887 |
+
"""
|
| 888 |
+
# Caller is responsible for ensuring indexer annotation is accurate
|
| 889 |
+
|
| 890 |
+
n = self.shape[axis]
|
| 891 |
+
indexer = maybe_convert_indices(indexer, n, verify=verify)
|
| 892 |
+
|
| 893 |
+
new_labels = self.axes[axis].take(indexer)
|
| 894 |
+
return self.reindex_indexer(
|
| 895 |
+
new_axis=new_labels,
|
| 896 |
+
indexer=indexer,
|
| 897 |
+
axis=axis,
|
| 898 |
+
allow_dups=True,
|
| 899 |
+
copy=None,
|
| 900 |
+
)
|
| 901 |
+
|
| 902 |
+
|
| 903 |
+
class BlockManager(libinternals.BlockManager, BaseBlockManager):
|
| 904 |
+
"""
|
| 905 |
+
BaseBlockManager that holds 2D blocks.
|
| 906 |
+
"""
|
| 907 |
+
|
| 908 |
+
ndim = 2
|
| 909 |
+
|
| 910 |
+
# ----------------------------------------------------------------
|
| 911 |
+
# Constructors
|
| 912 |
+
|
| 913 |
+
    def __init__(
        self,
        blocks: Sequence[Block],
        axes: Sequence[Index],
        verify_integrity: bool = True,
    ) -> None:
        # Validation-only constructor: no explicit super().__init__ call is
        # made here — presumably the C-extension base class
        # (libinternals.BlockManager) handles attribute storage; verify
        # against the pyx source.
        if verify_integrity:
            # Assertion disabled for performance
            # assert all(isinstance(x, Index) for x in axes)

            # Every block must have the same dimensionality as the manager.
            for block in blocks:
                if self.ndim != block.ndim:
                    raise AssertionError(
                        f"Number of Block dimensions ({block.ndim}) must equal "
                        f"number of axes ({self.ndim})"
                    )
                # As of 2.0, the caller is responsible for ensuring that
                # DatetimeTZBlock with block.ndim == 2 has block.values.ndim ==2;
                # previously there was a special check for fastparquet compat.

            self._verify_integrity()
    def _verify_integrity(self) -> None:
        # Sanity-check that every block's non-item shape matches the manager's
        # and that the blocks together cover exactly len(self.items) columns.
        mgr_shape = self.shape
        tot_items = sum(len(x.mgr_locs) for x in self.blocks)
        for block in self.blocks:
            if block.shape[1:] != mgr_shape[1:]:
                raise_construction_error(tot_items, block.shape[1:], self.axes)
        if len(self.items) != tot_items:
            raise AssertionError(
                "Number of manager items must equal union of "
                f"block items\n# manager items: {len(self.items)}, # "
                f"tot_items: {tot_items}"
            )
    @classmethod
    def from_blocks(cls, blocks: list[Block], axes: list[Index]) -> Self:
        """
        Constructor for BlockManager and SingleBlockManager with same signature.
        """
        # Integrity checks are skipped on this fastpath; callers guarantee
        # consistent blocks/axes.
        return cls(blocks, axes, verify_integrity=False)
# ----------------------------------------------------------------
|
| 956 |
+
# Indexing
|
| 957 |
+
|
| 958 |
+
    def fast_xs(self, loc: int) -> SingleBlockManager:
        """
        Return the array corresponding to `frame.iloc[loc]`.

        Parameters
        ----------
        loc : int

        Returns
        -------
        np.ndarray or ExtensionArray
        """
        if len(self.blocks) == 1:
            # TODO: this could be wrong if blk.mgr_locs is not slice(None)-like;
            #  is this ruled out in the general case?
            result = self.blocks[0].iget((slice(None), loc))
            # in the case of a single block, the new block is a view
            bp = BlockPlacement(slice(0, len(result)))
            block = new_block(
                result,
                placement=bp,
                ndim=1,
                refs=self.blocks[0].refs,
            )
            return SingleBlockManager(block, self.axes[0])

        # Multiple blocks: interleave one element per column into a single
        # array of the common (upcast) dtype.
        dtype = interleaved_dtype([blk.dtype for blk in self.blocks])

        n = len(self)

        if isinstance(dtype, ExtensionDtype):
            # TODO: use object dtype as workaround for non-performant
            #  EA.__setitem__ methods. (primarily ArrowExtensionArray.__setitem__
            #  when iteratively setting individual values)
            #  https://github.com/pandas-dev/pandas/pull/54508#issuecomment-1675827918
            result = np.empty(n, dtype=object)
        else:
            result = np.empty(n, dtype=dtype)
            result = ensure_wrapped_if_datetimelike(result)

        for blk in self.blocks:
            # Such assignment may incorrectly coerce NaT to None
            # result[blk.mgr_locs] = blk._slice((slice(None), loc))
            for i, rl in enumerate(blk.mgr_locs):
                result[rl] = blk.iget((i, loc))

        if isinstance(dtype, ExtensionDtype):
            # Convert the object-dtype scratch array back to the target EA.
            cls = dtype.construct_array_type()
            result = cls._from_sequence(result, dtype=dtype)

        bp = BlockPlacement(slice(0, len(result)))
        block = new_block(result, placement=bp, ndim=1)
        return SingleBlockManager(block, self.axes[0])
    def iget(self, i: int, track_ref: bool = True) -> SingleBlockManager:
        """
        Return the data as a SingleBlockManager.

        Parameters
        ----------
        i : int
            Column position.
        track_ref : bool, default True
            If False, the returned manager does not share Copy-on-Write
            reference tracking with the parent block.
        """
        block = self.blocks[self.blknos[i]]
        values = block.iget(self.blklocs[i])

        # shortcut for select a single-dim from a 2-dim BM
        bp = BlockPlacement(slice(0, len(values)))
        nb = type(block)(
            values, placement=bp, ndim=1, refs=block.refs if track_ref else None
        )
        return SingleBlockManager(nb, self.axes[1])
def iget_values(self, i: int) -> ArrayLike:
|
| 1027 |
+
"""
|
| 1028 |
+
Return the data for column i as the values (ndarray or ExtensionArray).
|
| 1029 |
+
|
| 1030 |
+
Warning! The returned array is a view but doesn't handle Copy-on-Write,
|
| 1031 |
+
so this should be used with caution.
|
| 1032 |
+
"""
|
| 1033 |
+
# TODO(CoW) making the arrays read-only might make this safer to use?
|
| 1034 |
+
block = self.blocks[self.blknos[i]]
|
| 1035 |
+
values = block.iget(self.blklocs[i])
|
| 1036 |
+
return values
|
| 1037 |
+
|
| 1038 |
+
    @property
    def column_arrays(self) -> list[np.ndarray]:
        """
        Used in the JSON C code to access column arrays.
        This optimizes compared to using `iget_values` by converting each
        block's values only once instead of once per column.

        Warning! This doesn't handle Copy-on-Write, so should be used with
        caution (current use case of consuming this in the JSON code is fine).
        """
        # This is an optimized equivalent to
        #  result = [self.iget_values(i) for i in range(len(self.items))]
        result: list[np.ndarray | None] = [None] * len(self.items)

        for blk in self.blocks:
            mgr_locs = blk._mgr_locs
            values = blk.array_values._values_for_json()
            if values.ndim == 1:
                # TODO(EA2D): special casing not needed with 2D EAs
                result[mgr_locs[0]] = values

            else:
                # Scatter each row of the 2D block into its column slot.
                for i, loc in enumerate(mgr_locs):
                    result[loc] = values[i]

        # error: Incompatible return value type (got "List[None]",
        # expected "List[ndarray[Any, Any]]")
        return result  # type: ignore[return-value]
    def iset(
        self,
        loc: int | slice | np.ndarray,
        value: ArrayLike,
        inplace: bool = False,
        refs: BlockValuesRefs | None = None,
    ) -> None:
        """
        Set new item in-place. Does not consolidate. Adds new Block if not
        contained in the current set of items

        Parameters
        ----------
        loc : int, slice or np.ndarray
            Column position(s) to set.
        value : np.ndarray or ExtensionArray
        inplace : bool, default False
            If True and the existing block can hold ``value``, write into it
            rather than replacing the block.
        refs : BlockValuesRefs, optional
            Reference-tracking object for Copy-on-Write.
        """

        # FIXME: refactor, clearly separate broadcasting & zip-like assignment
        #  can prob also fix the various if tests for sparse/categorical
        if self._blklocs is None and self.ndim > 1:
            self._rebuild_blknos_and_blklocs()

        # Note: we exclude DTA/TDA here
        value_is_extension_type = is_1d_only_ea_dtype(value.dtype)
        if not value_is_extension_type:
            # Reshape so value is laid out block-wise (items on axis 0).
            if value.ndim == 2:
                value = value.T
            else:
                value = ensure_block_shape(value, ndim=2)

            if value.shape[1:] != self.shape[1:]:
                raise AssertionError(
                    "Shape of new values must be compatible with manager shape"
                )

        if lib.is_integer(loc):
            # We have 6 tests where loc is _not_ an int.
            # In this case, get_blkno_placements will yield only one tuple,
            #  containing (self._blknos[loc], BlockPlacement(slice(0, 1, 1)))

            # Check if we can use _iset_single fastpath
            loc = cast(int, loc)
            blkno = self.blknos[loc]
            blk = self.blocks[blkno]
            if len(blk._mgr_locs) == 1:  # TODO: fastest way to check this?
                return self._iset_single(
                    loc,
                    value,
                    inplace=inplace,
                    blkno=blkno,
                    blk=blk,
                    refs=refs,
                )

            # error: Incompatible types in assignment (expression has type
            # "List[Union[int, slice, ndarray]]", variable has type "Union[int,
            # slice, ndarray]")
            loc = [loc]  # type: ignore[assignment]

        # categorical/sparse/datetimetz
        if value_is_extension_type:

            def value_getitem(placement):
                return value

        else:

            def value_getitem(placement):
                return value[placement.indexer]

        # Accessing public blknos ensures the public versions are initialized
        blknos = self.blknos[loc]
        blklocs = self.blklocs[loc].copy()

        # "Unfit" positions are those whose current block cannot hold the new
        # values; they are collected and re-homed into fresh blocks below.
        unfit_mgr_locs = []
        unfit_val_locs = []
        removed_blknos = []
        for blkno_l, val_locs in libinternals.get_blkno_placements(blknos, group=True):
            blk = self.blocks[blkno_l]
            blk_locs = blklocs[val_locs.indexer]
            if inplace and blk.should_store(value):
                # Updating inplace -> check if we need to do Copy-on-Write
                if using_copy_on_write() and not self._has_no_reference_block(blkno_l):
                    self._iset_split_block(
                        blkno_l, blk_locs, value_getitem(val_locs), refs=refs
                    )
                else:
                    blk.set_inplace(blk_locs, value_getitem(val_locs))
                    continue
            else:
                unfit_mgr_locs.append(blk.mgr_locs.as_array[blk_locs])
                unfit_val_locs.append(val_locs)

                # If all block items are unfit, schedule the block for removal.
                if len(val_locs) == len(blk.mgr_locs):
                    removed_blknos.append(blkno_l)
                    continue
                else:
                    # Defer setting the new values to enable consolidation
                    self._iset_split_block(blkno_l, blk_locs, refs=refs)

        if len(removed_blknos):
            # Remove blocks & update blknos accordingly
            is_deleted = np.zeros(self.nblocks, dtype=np.bool_)
            is_deleted[removed_blknos] = True

            # Renumber surviving blocks so blknos stay dense.
            new_blknos = np.empty(self.nblocks, dtype=np.intp)
            new_blknos.fill(-1)
            new_blknos[~is_deleted] = np.arange(self.nblocks - len(removed_blknos))
            self._blknos = new_blknos[self._blknos]
            self.blocks = tuple(
                blk for i, blk in enumerate(self.blocks) if i not in set(removed_blknos)
            )

        if unfit_val_locs:
            unfit_idxr = np.concatenate(unfit_mgr_locs)
            unfit_count = len(unfit_idxr)

            new_blocks: list[Block] = []
            if value_is_extension_type:
                # This code (ab-)uses the fact that EA blocks contain only
                # one item.
                # TODO(EA2D): special casing unnecessary with 2D EAs
                new_blocks.extend(
                    new_block_2d(
                        values=value,
                        placement=BlockPlacement(slice(mgr_loc, mgr_loc + 1)),
                        refs=refs,
                    )
                    for mgr_loc in unfit_idxr
                )

                self._blknos[unfit_idxr] = np.arange(unfit_count) + len(self.blocks)
                self._blklocs[unfit_idxr] = 0

            else:
                # unfit_val_locs contains BlockPlacement objects
                unfit_val_items = unfit_val_locs[0].append(unfit_val_locs[1:])

                new_blocks.append(
                    new_block_2d(
                        values=value_getitem(unfit_val_items),
                        placement=BlockPlacement(unfit_idxr),
                        refs=refs,
                    )
                )

                self._blknos[unfit_idxr] = len(self.blocks)
                self._blklocs[unfit_idxr] = np.arange(unfit_count)

            self.blocks += tuple(new_blocks)

            # Newly created block's dtype may already be present.
            self._known_consolidated = False
    def _iset_split_block(
        self,
        blkno_l: int,
        blk_locs: np.ndarray | list[int],
        value: ArrayLike | None = None,
        refs: BlockValuesRefs | None = None,
    ) -> None:
        """Removes columns from a block by splitting the block.

        Avoids copying the whole block through slicing and updates the manager
        after determining the new block structure. Optionally adds a new block,
        otherwise has to be done by the caller.

        Parameters
        ----------
        blkno_l: The block number to operate on, relevant for updating the manager
        blk_locs: The locations of our block that should be deleted.
        value: The value to set as a replacement.
        refs: The reference tracking object of the value to set.
        """
        blk = self.blocks[blkno_l]

        if self._blklocs is None:
            self._rebuild_blknos_and_blklocs()

        # blk.delete splits the block around the removed columns.
        nbs_tup = tuple(blk.delete(blk_locs))
        if value is not None:
            # Replacement block takes the removed columns' manager positions.
            locs = blk.mgr_locs.as_array[blk_locs]
            first_nb = new_block_2d(value, BlockPlacement(locs), refs=refs)
        else:
            first_nb = nbs_tup[0]
            nbs_tup = tuple(nbs_tup[1:])

        # first_nb replaces the original block in place; the remaining split
        # pieces are appended at the end of self.blocks.
        nr_blocks = len(self.blocks)
        blocks_tup = (
            self.blocks[:blkno_l] + (first_nb,) + self.blocks[blkno_l + 1 :] + nbs_tup
        )
        self.blocks = blocks_tup

        if not nbs_tup and value is not None:
            # No need to update anything if split did not happen
            return

        self._blklocs[first_nb.mgr_locs.indexer] = np.arange(len(first_nb))

        for i, nb in enumerate(nbs_tup):
            self._blklocs[nb.mgr_locs.indexer] = np.arange(len(nb))
            self._blknos[nb.mgr_locs.indexer] = i + nr_blocks
    def _iset_single(
        self,
        loc: int,
        value: ArrayLike,
        inplace: bool,
        blkno: int,
        blk: Block,
        refs: BlockValuesRefs | None = None,
    ) -> None:
        """
        Fastpath for iset when we are only setting a single position and
        the Block currently in that position is itself single-column.

        In this case we can swap out the entire Block and blklocs and blknos
        are unaffected.
        """
        # Caller is responsible for verifying value.shape

        if inplace and blk.should_store(value):
            copy = False
            if using_copy_on_write() and not self._has_no_reference_block(blkno):
                # perform Copy-on-Write and clear the reference
                copy = True
            iloc = self.blklocs[loc]
            blk.set_inplace(slice(iloc, iloc + 1), value, copy=copy)
            return

        # Replace the whole block; placement is reused so blknos/blklocs
        # need no update.
        nb = new_block_2d(value, placement=blk._mgr_locs, refs=refs)
        old_blocks = self.blocks
        new_blocks = old_blocks[:blkno] + (nb,) + old_blocks[blkno + 1 :]
        self.blocks = new_blocks
        return
    def column_setitem(
        self, loc: int, idx: int | slice | np.ndarray, value, inplace_only: bool = False
    ) -> None:
        """
        Set values ("setitem") into a single column (not setting the full column).

        This is a method on the BlockManager level, to avoid creating an
        intermediate Series at the DataFrame level (`s = df[loc]; s[idx] = value`)
        """
        needs_to_warn = False
        if warn_copy_on_write() and not self._has_no_reference(loc):
            if not isinstance(
                self.blocks[self.blknos[loc]].values,
                (ArrowExtensionArray, ArrowStringArray),
            ):
                # We might raise if we are in an expansion case, so defer
                # warning till we actually updated
                needs_to_warn = True

        elif using_copy_on_write() and not self._has_no_reference(loc):
            blkno = self.blknos[loc]
            # Split blocks to only copy the column we want to modify
            blk_loc = self.blklocs[loc]
            # Copy our values
            values = self.blocks[blkno].values
            if values.ndim == 1:
                values = values.copy()
            else:
                # Use [blk_loc] as indexer to keep ndim=2, this already results in a
                # copy
                values = values[[blk_loc]]
            self._iset_split_block(blkno, [blk_loc], values)

        # this manager is only created temporarily to mutate the values in place
        # so don't track references, otherwise the `setitem` would perform CoW again
        col_mgr = self.iget(loc, track_ref=False)
        if inplace_only:
            col_mgr.setitem_inplace(idx, value)
        else:
            new_mgr = col_mgr.setitem((idx,), value)
            self.iset(loc, new_mgr._block.values, inplace=True)

        if needs_to_warn:
            # Deferred CoW warning (see above).
            warnings.warn(
                COW_WARNING_GENERAL_MSG,
                FutureWarning,
                stacklevel=find_stack_level(),
            )
    def insert(self, loc: int, item: Hashable, value: ArrayLike, refs=None) -> None:
        """
        Insert item at selected position.

        Parameters
        ----------
        loc : int
        item : hashable
        value : np.ndarray or ExtensionArray
        refs : The reference tracking object of the value to set.
        """
        with warnings.catch_warnings():
            # TODO: re-issue this with setitem-specific message?
            warnings.filterwarnings(
                "ignore",
                "The behavior of Index.insert with object-dtype is deprecated",
                category=FutureWarning,
            )
            new_axis = self.items.insert(loc, item)

        if value.ndim == 2:
            value = value.T
            if len(value) > 1:
                # Only a single new column can be inserted at a time.
                raise ValueError(
                    f"Expected a 1D array, got an array with shape {value.T.shape}"
                )
        else:
            value = ensure_block_shape(value, ndim=self.ndim)

        bp = BlockPlacement(slice(loc, loc + 1))
        block = new_block_2d(values=value, placement=bp, refs=refs)

        if not len(self.blocks):
            # Fastpath
            self._blklocs = np.array([0], dtype=np.intp)
            self._blknos = np.array([0], dtype=np.intp)
        else:
            # Shift placements of existing blocks, then record the new column.
            self._insert_update_mgr_locs(loc)
            self._insert_update_blklocs_and_blknos(loc)

        self.axes[0] = new_axis
        self.blocks += (block,)

        self._known_consolidated = False

        # Each insert appends a new (unconsolidated) block; warn when the
        # frame has become pathologically fragmented.
        if sum(not block.is_extension for block in self.blocks) > 100:
            warnings.warn(
                "DataFrame is highly fragmented. This is usually the result "
                "of calling `frame.insert` many times, which has poor performance. "
                "Consider joining all columns at once using pd.concat(axis=1) "
                "instead. To get a de-fragmented frame, use `newframe = frame.copy()`",
                PerformanceWarning,
                stacklevel=find_stack_level(),
            )
def _insert_update_mgr_locs(self, loc) -> None:
|
| 1403 |
+
"""
|
| 1404 |
+
When inserting a new Block at location 'loc', we increment
|
| 1405 |
+
all of the mgr_locs of blocks above that by one.
|
| 1406 |
+
"""
|
| 1407 |
+
for blkno, count in _fast_count_smallints(self.blknos[loc:]):
|
| 1408 |
+
# .620 this way, .326 of which is in increment_above
|
| 1409 |
+
blk = self.blocks[blkno]
|
| 1410 |
+
blk._mgr_locs = blk._mgr_locs.increment_above(loc)
|
| 1411 |
+
|
| 1412 |
+
    def _insert_update_blklocs_and_blknos(self, loc) -> None:
        """
        When inserting a new Block at location 'loc', we update our
        _blklocs and _blknos.
        """

        # Accessing public blklocs ensures the public versions are initialized
        if loc == self.blklocs.shape[0]:
            # np.append is a lot faster, let's use it if we can.
            self._blklocs = np.append(self._blklocs, 0)
            self._blknos = np.append(self._blknos, len(self.blocks))
        elif loc == 0:
            # np.append is a lot faster, let's use it if we can.
            # Append to the reversed arrays, then reverse back => prepend.
            self._blklocs = np.append(self._blklocs[::-1], 0)[::-1]
            self._blknos = np.append(self._blknos[::-1], len(self.blocks))[::-1]
        else:
            # Mid-array insert: let the cython helper splice in the new entry.
            new_blklocs, new_blknos = libinternals.update_blklocs_and_blknos(
                self.blklocs, self.blknos, loc, len(self.blocks)
            )
            self._blklocs = new_blklocs
            self._blknos = new_blknos
def idelete(self, indexer) -> BlockManager:
|
| 1435 |
+
"""
|
| 1436 |
+
Delete selected locations, returning a new BlockManager.
|
| 1437 |
+
"""
|
| 1438 |
+
is_deleted = np.zeros(self.shape[0], dtype=np.bool_)
|
| 1439 |
+
is_deleted[indexer] = True
|
| 1440 |
+
taker = (~is_deleted).nonzero()[0]
|
| 1441 |
+
|
| 1442 |
+
nbs = self._slice_take_blocks_ax0(taker, only_slice=True, ref_inplace_op=True)
|
| 1443 |
+
new_columns = self.items[~is_deleted]
|
| 1444 |
+
axes = [new_columns, self.axes[1]]
|
| 1445 |
+
return type(self)(tuple(nbs), axes, verify_integrity=False)
|
| 1446 |
+
|
| 1447 |
+
# ----------------------------------------------------------------
|
| 1448 |
+
# Block-wise Operation
|
| 1449 |
+
|
| 1450 |
+
def grouped_reduce(self, func: Callable) -> Self:
|
| 1451 |
+
"""
|
| 1452 |
+
Apply grouped reduction function blockwise, returning a new BlockManager.
|
| 1453 |
+
|
| 1454 |
+
Parameters
|
| 1455 |
+
----------
|
| 1456 |
+
func : grouped reduction function
|
| 1457 |
+
|
| 1458 |
+
Returns
|
| 1459 |
+
-------
|
| 1460 |
+
BlockManager
|
| 1461 |
+
"""
|
| 1462 |
+
result_blocks: list[Block] = []
|
| 1463 |
+
|
| 1464 |
+
for blk in self.blocks:
|
| 1465 |
+
if blk.is_object:
|
| 1466 |
+
# split on object-dtype blocks bc some columns may raise
|
| 1467 |
+
# while others do not.
|
| 1468 |
+
for sb in blk._split():
|
| 1469 |
+
applied = sb.apply(func)
|
| 1470 |
+
result_blocks = extend_blocks(applied, result_blocks)
|
| 1471 |
+
else:
|
| 1472 |
+
applied = blk.apply(func)
|
| 1473 |
+
result_blocks = extend_blocks(applied, result_blocks)
|
| 1474 |
+
|
| 1475 |
+
if len(result_blocks) == 0:
|
| 1476 |
+
nrows = 0
|
| 1477 |
+
else:
|
| 1478 |
+
nrows = result_blocks[0].values.shape[-1]
|
| 1479 |
+
index = Index(range(nrows))
|
| 1480 |
+
|
| 1481 |
+
return type(self).from_blocks(result_blocks, [self.axes[0], index])
|
| 1482 |
+
|
| 1483 |
+
def reduce(self, func: Callable) -> Self:
|
| 1484 |
+
"""
|
| 1485 |
+
Apply reduction function blockwise, returning a single-row BlockManager.
|
| 1486 |
+
|
| 1487 |
+
Parameters
|
| 1488 |
+
----------
|
| 1489 |
+
func : reduction function
|
| 1490 |
+
|
| 1491 |
+
Returns
|
| 1492 |
+
-------
|
| 1493 |
+
BlockManager
|
| 1494 |
+
"""
|
| 1495 |
+
# If 2D, we assume that we're operating column-wise
|
| 1496 |
+
assert self.ndim == 2
|
| 1497 |
+
|
| 1498 |
+
res_blocks: list[Block] = []
|
| 1499 |
+
for blk in self.blocks:
|
| 1500 |
+
nbs = blk.reduce(func)
|
| 1501 |
+
res_blocks.extend(nbs)
|
| 1502 |
+
|
| 1503 |
+
index = Index([None]) # placeholder
|
| 1504 |
+
new_mgr = type(self).from_blocks(res_blocks, [self.items, index])
|
| 1505 |
+
return new_mgr
|
| 1506 |
+
|
| 1507 |
+
    def operate_blockwise(self, other: BlockManager, array_op) -> BlockManager:
        """
        Apply array_op blockwise with another (aligned) BlockManager.
        """
        # Delegates to the module-level helper (internals.ops.operate_blockwise).
        return operate_blockwise(self, other, array_op)
    def _equal_values(self: BlockManager, other: BlockManager) -> bool:
        """
        Used in .equals defined in base class. Only check the column values
        assuming shape and indexes have already been checked.
        """
        # Compare column-by-column across possibly differently-arranged blocks.
        return blockwise_all(self, other, array_equals)
    def quantile(
        self,
        *,
        qs: Index,  # with dtype float64
        interpolation: QuantileInterpolation = "linear",
    ) -> Self:
        """
        Iterate over blocks applying quantile reduction.
        This routine is intended for reduction type operations and
        will do inference on the generated blocks.

        Parameters
        ----------
        qs : list of the quantiles to be computed
        interpolation : type of interpolation, default 'linear'

        Returns
        -------
        BlockManager
        """
        # Series dispatches to DataFrame for quantile, which allows us to
        # simplify some of the code here and in the blocks
        assert self.ndim >= 2
        assert is_list_like(qs)  # caller is responsible for this

        # The column axis of the result is the requested quantiles.
        new_axes = list(self.axes)
        new_axes[1] = Index(qs, dtype=np.float64)

        blocks = [
            blk.quantile(qs=qs, interpolation=interpolation) for blk in self.blocks
        ]

        return type(self)(blocks, new_axes)
|
| 1554 |
+
# ----------------------------------------------------------------
|
| 1555 |
+
|
| 1556 |
+
    def unstack(self, unstacker, fill_value) -> BlockManager:
        """
        Return a BlockManager with all blocks unstacked.

        Parameters
        ----------
        unstacker : reshape._Unstacker
        fill_value : Any
            fill_value for newly introduced missing values.

        Returns
        -------
        unstacked : BlockManager
        """
        new_columns = unstacker.get_new_columns(self.items)
        new_index = unstacker.new_index

        allow_fill = not unstacker.mask_all
        if allow_fill:
            # calculating the full mask once and passing it to Block._unstack is
            # faster than letting it be recalculated in each repeated call
            new_mask2D = (~unstacker.mask).reshape(*unstacker.full_shape)
            needs_masking = new_mask2D.any(axis=0)
        else:
            needs_masking = np.zeros(unstacker.full_shape[1], dtype=bool)

        new_blocks: list[Block] = []
        columns_mask: list[np.ndarray] = []

        # Each original column expands to `factor` output columns.
        if len(self.items) == 0:
            factor = 1
        else:
            fac = len(new_columns) / len(self.items)
            assert fac == int(fac)
            factor = int(fac)

        for blk in self.blocks:
            mgr_locs = blk.mgr_locs
            new_placement = mgr_locs.tile_for_unstack(factor)

            blocks, mask = blk._unstack(
                unstacker,
                fill_value,
                new_placement=new_placement,
                needs_masking=needs_masking,
            )

            new_blocks.extend(blocks)
            columns_mask.extend(mask)

            # Block._unstack should ensure this holds,
            assert mask.sum() == sum(len(nb._mgr_locs) for nb in blocks)
            # In turn this ensures that in the BlockManager call below
            # we have len(new_columns) == sum(x.shape[0] for x in new_blocks)
            # which suffices to allow us to pass verify_integrity=False

        new_columns = new_columns[columns_mask]

        bm = BlockManager(new_blocks, [new_columns, new_index], verify_integrity=False)
        return bm
    def to_dict(self) -> dict[str, Self]:
        """
        Return a dict of str(dtype) -> BlockManager

        Returns
        -------
        values : a dict of dtype -> BlockManager
        """
        # Group blocks by their stringified dtype.
        bd: dict[str, list[Block]] = {}
        for b in self.blocks:
            bd.setdefault(str(b.dtype), []).append(b)

        # TODO(EA2D): the combine will be unnecessary with 2D EAs
        return {dtype: self._combine(blocks) for dtype, blocks in bd.items()}
    def as_array(
        self,
        dtype: np.dtype | None = None,
        copy: bool = False,
        na_value: object = lib.no_default,
    ) -> np.ndarray:
        """
        Convert the blockmanager data into an numpy array.

        Parameters
        ----------
        dtype : np.dtype or None, default None
            Data type of the return array.
        copy : bool, default False
            If True then guarantee that a copy is returned. A value of
            False does not guarantee that the underlying data is not
            copied.
        na_value : object, default lib.no_default
            Value to be used as the missing value sentinel.

        Returns
        -------
        arr : ndarray
        """
        passed_nan = lib.is_float(na_value) and isna(na_value)

        if len(self.blocks) == 0:
            # No columns: empty float array of the right (transposed) shape.
            arr = np.empty(self.shape, dtype=float)
            return arr.transpose()

        if self.is_single_block:
            blk = self.blocks[0]

            if na_value is not lib.no_default:
                # We want to copy when na_value is provided to avoid
                # mutating the original object
                if lib.is_np_dtype(blk.dtype, "f") and passed_nan:
                    # We are already numpy-float and na_value=np.nan
                    pass
                else:
                    copy = True

            if blk.is_extension:
                # Avoid implicit conversion of extension blocks to object

                # error: Item "ndarray" of "Union[ndarray, ExtensionArray]" has no
                # attribute "to_numpy"
                arr = blk.values.to_numpy(  # type: ignore[union-attr]
                    dtype=dtype,
                    na_value=na_value,
                    copy=copy,
                ).reshape(blk.shape)
            elif not copy:
                arr = np.asarray(blk.values, dtype=dtype)
            else:
                arr = np.array(blk.values, dtype=dtype, copy=copy)

            if using_copy_on_write() and not copy:
                # Under Copy-on-Write, hand out a read-only view so callers
                # cannot mutate the shared underlying data.
                arr = arr.view()
                arr.flags.writeable = False
        else:
            arr = self._interleave(dtype=dtype, na_value=na_value)
            # The underlying data was copied within _interleave, so no need
            # to further copy if copy=True or setting na_value

        # Substitute the NA sentinel unless it is a no-op (no na_value given,
        # or the array is float and na_value is already NaN).
        if na_value is lib.no_default:
            pass
        elif arr.dtype.kind == "f" and passed_nan:
            pass
        else:
            arr[isna(arr)] = na_value

        return arr.transpose()
    def _interleave(
        self,
        dtype: np.dtype | None = None,
        na_value: object = lib.no_default,
    ) -> np.ndarray:
        """
        Return ndarray from blocks with specified item order
        Items must be contained in the blocks
        """
        if not dtype:
            # Incompatible types in assignment (expression has type
            # "Optional[Union[dtype[Any], ExtensionDtype]]", variable has
            # type "Optional[dtype[Any]]")
            dtype = interleaved_dtype(  # type: ignore[assignment]
                [blk.dtype for blk in self.blocks]
            )

        # error: Argument 1 to "ensure_np_dtype" has incompatible type
        # "Optional[dtype[Any]]"; expected "Union[dtype[Any], ExtensionDtype]"
        dtype = ensure_np_dtype(dtype)  # type: ignore[arg-type]
        result = np.empty(self.shape, dtype=dtype)

        # Tracks which rows get filled, so missing items can be detected below.
        itemmask = np.zeros(self.shape[0])

        if dtype == np.dtype("object") and na_value is lib.no_default:
            # much more performant than using to_numpy below
            for blk in self.blocks:
                rl = blk.mgr_locs
                arr = blk.get_values(dtype)
                result[rl.indexer] = arr
                itemmask[rl.indexer] = 1
            return result

        for blk in self.blocks:
            rl = blk.mgr_locs
            if blk.is_extension:
                # Avoid implicit conversion of extension blocks to object

                # error: Item "ndarray" of "Union[ndarray, ExtensionArray]" has no
                # attribute "to_numpy"
                arr = blk.values.to_numpy(  # type: ignore[union-attr]
                    dtype=dtype,
                    na_value=na_value,
                )
            else:
                arr = blk.get_values(dtype)
            result[rl.indexer] = arr
            itemmask[rl.indexer] = 1

        if not itemmask.all():
            raise AssertionError("Some items were not contained in blocks")

        return result
# ----------------------------------------------------------------
|
| 1762 |
+
# Consolidation
|
| 1763 |
+
|
| 1764 |
+
def is_consolidated(self) -> bool:
|
| 1765 |
+
"""
|
| 1766 |
+
Return True if more than one block with the same dtype
|
| 1767 |
+
"""
|
| 1768 |
+
if not self._known_consolidated:
|
| 1769 |
+
self._consolidate_check()
|
| 1770 |
+
return self._is_consolidated
|
| 1771 |
+
|
| 1772 |
+
def _consolidate_check(self) -> None:
|
| 1773 |
+
if len(self.blocks) == 1:
|
| 1774 |
+
# fastpath
|
| 1775 |
+
self._is_consolidated = True
|
| 1776 |
+
self._known_consolidated = True
|
| 1777 |
+
return
|
| 1778 |
+
dtypes = [blk.dtype for blk in self.blocks if blk._can_consolidate]
|
| 1779 |
+
self._is_consolidated = len(dtypes) == len(set(dtypes))
|
| 1780 |
+
self._known_consolidated = True
|
| 1781 |
+
|
| 1782 |
+
    def _consolidate_inplace(self) -> None:
        # In general, _consolidate_inplace should only be called via
        # DataFrame._consolidate_inplace, otherwise we will fail to invalidate
        # the DataFrame's _item_cache. The exception is for newly-created
        # BlockManager objects not yet attached to a DataFrame.
        if not self.is_consolidated():
            self.blocks = _consolidate(self.blocks)
            self._is_consolidated = True
            self._known_consolidated = True
            # Replacing self.blocks invalidates the blkno/blkloc mappings.
            self._rebuild_blknos_and_blklocs()
# ----------------------------------------------------------------
|
| 1794 |
+
# Concatenation
|
| 1795 |
+
|
| 1796 |
+
    @classmethod
    def concat_horizontal(cls, mgrs: list[Self], axes: list[Index]) -> Self:
        """
        Concatenate uniformly-indexed BlockManagers horizontally.
        """
        offset = 0
        blocks: list[Block] = []
        for mgr in mgrs:
            for blk in mgr.blocks:
                # We need to do getitem_block here otherwise we would be altering
                # blk.mgr_locs in place, which would render it invalid. This is only
                # relevant in the copy=False case.
                nb = blk.slice_block_columns(slice(None))
                nb._mgr_locs = nb._mgr_locs.add(offset)
                blocks.append(nb)

            # Shift subsequent managers' column placements past this one.
            offset += len(mgr.items)

        new_mgr = cls(tuple(blocks), axes)
        return new_mgr
    @classmethod
    def concat_vertical(cls, mgrs: list[Self], axes: list[Index]) -> Self:
        """
        Concatenate uniformly-indexed BlockManagers vertically.
        """
        # Vertical concatenation is implemented in pandas.core.internals.concat.
        raise NotImplementedError("This logic lives (for now) in internals.concat")
|
| 1825 |
+
class SingleBlockManager(BaseBlockManager, SingleDataManager):
    """Manage a single block; the manager backing a Series."""

    @property
    def ndim(self) -> Literal[1]:
        return 1

    # A single block is trivially consolidated, so these are class-level
    # constants rather than computed per-instance.
    _is_consolidated = True
    _known_consolidated = True
    __slots__ = ()
    is_single_block = True

    def __init__(
        self,
        block: Block,
        axis: Index,
        verify_integrity: bool = False,
    ) -> None:
        # Assertions disabled for performance
        # assert isinstance(block, Block), type(block)
        # assert isinstance(axis, Index), type(axis)

        self.axes = [axis]
        self.blocks = (block,)

    @classmethod
    def from_blocks(
        cls,
        blocks: list[Block],
        axes: list[Index],
    ) -> Self:
        """
        Constructor for BlockManager and SingleBlockManager with same signature.
        """
        assert len(blocks) == 1
        assert len(axes) == 1
        return cls(blocks[0], axes[0], verify_integrity=False)

    @classmethod
    def from_array(
        cls, array: ArrayLike, index: Index, refs: BlockValuesRefs | None = None
    ) -> SingleBlockManager:
        """
        Constructor for if we have an array that is not yet a Block.
        """
        array = maybe_coerce_values(array)
        bp = BlockPlacement(slice(0, len(index)))
        block = new_block(array, placement=bp, ndim=1, refs=refs)
        return cls(block, index)

    def to_2d_mgr(self, columns: Index) -> BlockManager:
        """
        Manager analogue of Series.to_frame
        """
        blk = self.blocks[0]
        arr = ensure_block_shape(blk.values, ndim=2)
        bp = BlockPlacement(0)
        # Propagate refs so Copy-on-Write tracking survives the reshape.
        new_blk = type(blk)(arr, placement=bp, ndim=2, refs=blk.refs)
        axes = [columns, self.axes[0]]
        return BlockManager([new_blk], axes=axes, verify_integrity=False)

    def _has_no_reference(self, i: int = 0) -> bool:
        """
        Check for column `i` if it has references.
        (whether it references another array or is itself being referenced)
        Returns True if the column has no references.
        """
        return not self.blocks[0].refs.has_reference()

    def __getstate__(self):
        block_values = [b.values for b in self.blocks]
        block_items = [self.items[b.mgr_locs.indexer] for b in self.blocks]
        axes_array = list(self.axes)

        extra_state = {
            "0.14.1": {
                "axes": axes_array,
                "blocks": [
                    {"values": b.values, "mgr_locs": b.mgr_locs.indexer}
                    for b in self.blocks
                ],
            }
        }

        # First three elements of the state are to maintain forward
        # compatibility with 0.13.1.
        return axes_array, block_values, block_items, extra_state

    def __setstate__(self, state) -> None:
        def unpickle_block(values, mgr_locs, ndim: int) -> Block:
            # TODO(EA2D): ndim would be unnecessary with 2D EAs
            # older pickles may store e.g. DatetimeIndex instead of DatetimeArray
            values = extract_array(values, extract_numpy=True)
            if not isinstance(mgr_locs, BlockPlacement):
                mgr_locs = BlockPlacement(mgr_locs)

            values = maybe_coerce_values(values)
            return new_block(values, placement=mgr_locs, ndim=ndim)

        # Only the "0.14.1" pickle layout (see __getstate__) is supported.
        if isinstance(state, tuple) and len(state) >= 4 and "0.14.1" in state[3]:
            state = state[3]["0.14.1"]
            self.axes = [ensure_index(ax) for ax in state["axes"]]
            ndim = len(self.axes)
            self.blocks = tuple(
                unpickle_block(b["values"], b["mgr_locs"], ndim=ndim)
                for b in state["blocks"]
            )
        else:
            raise NotImplementedError("pre-0.14.1 pickles are no longer supported")

        self._post_setstate()

    def _post_setstate(self) -> None:
        pass

    @cache_readonly
    def _block(self) -> Block:
        return self.blocks[0]

    @property
    def _blknos(self):
        """compat with BlockManager"""
        return None

    @property
    def _blklocs(self):
        """compat with BlockManager"""
        return None

    def get_rows_with_mask(self, indexer: npt.NDArray[np.bool_]) -> Self:
        # similar to get_slice, but not restricted to slice indexer
        blk = self._block
        if using_copy_on_write() and len(indexer) > 0 and indexer.all():
            # Selecting everything: return a shallow copy tracking references.
            return type(self)(blk.copy(deep=False), self.index)
        array = blk.values[indexer]

        if isinstance(indexer, np.ndarray) and indexer.dtype.kind == "b":
            # boolean indexing always gives a copy with numpy
            refs = None
        else:
            # TODO(CoW) in theory only need to track reference if new_array is a view
            refs = blk.refs

        bp = BlockPlacement(slice(0, len(array)))
        block = type(blk)(array, placement=bp, ndim=1, refs=refs)

        new_idx = self.index[indexer]
        return type(self)(block, new_idx)

    def get_slice(self, slobj: slice, axis: AxisInt = 0) -> SingleBlockManager:
        # Assertion disabled for performance
        # assert isinstance(slobj, slice), type(slobj)
        if axis >= self.ndim:
            raise IndexError("Requested axis not found in manager")

        blk = self._block
        array = blk.values[slobj]
        bp = BlockPlacement(slice(0, len(array)))
        # TODO this method is only used in groupby SeriesSplitter at the moment,
        # so passing refs is not yet covered by the tests
        block = type(blk)(array, placement=bp, ndim=1, refs=blk.refs)
        new_index = self.index._getitem_slice(slobj)
        return type(self)(block, new_index)

    @property
    def index(self) -> Index:
        return self.axes[0]

    @property
    def dtype(self) -> DtypeObj:
        return self._block.dtype

    def get_dtypes(self) -> npt.NDArray[np.object_]:
        return np.array([self._block.dtype], dtype=object)

    def external_values(self):
        """The array that Series.values returns"""
        return self._block.external_values()

    def internal_values(self):
        """The array that Series._values returns"""
        return self._block.values

    def array_values(self) -> ExtensionArray:
        """The array that Series.array returns"""
        return self._block.array_values

    def get_numeric_data(self) -> Self:
        if self._block.is_numeric:
            return self.copy(deep=False)
        return self.make_empty()

    @property
    def _can_hold_na(self) -> bool:
        return self._block._can_hold_na

    def setitem_inplace(self, indexer, value, warn: bool = True) -> None:
        """
        Set values with indexer.

        For Single[Block/Array]Manager, this backs s[indexer] = value

        This is an inplace version of `setitem()`, mutating the manager/values
        in place, not returning a new Manager (and Block), and thus never changing
        the dtype.
        """
        using_cow = using_copy_on_write()
        warn_cow = warn_copy_on_write()
        if (using_cow or warn_cow) and not self._has_no_reference(0):
            # The block is shared: under CoW copy before mutating; in
            # warning mode emit the deprecation warning instead.
            if using_cow:
                self.blocks = (self._block.copy(),)
                self._cache.clear()
            elif warn_cow and warn:
                warnings.warn(
                    COW_WARNING_SETITEM_MSG,
                    FutureWarning,
                    stacklevel=find_stack_level(),
                )

        super().setitem_inplace(indexer, value)

    def idelete(self, indexer) -> SingleBlockManager:
        """
        Delete single location from SingleBlockManager.

        Ensures that self.blocks doesn't become empty.
        """
        nb = self._block.delete(indexer)[0]
        self.blocks = (nb,)
        self.axes[0] = self.axes[0].delete(indexer)
        self._cache.clear()
        return self

    def fast_xs(self, loc):
        """
        fast path for getting a cross-section
        return a view of the data
        """
        raise NotImplementedError("Use series._values[loc] instead")

    def set_values(self, values: ArrayLike) -> None:
        """
        Set the values of the single block in place.

        Use at your own risk! This does not check if the passed values are
        valid for the current Block/SingleBlockManager (length, dtype, etc),
        and this does not properly keep track of references.
        """
        # NOTE(CoW) Currently this is only used for FrameColumnApply.series_generator
        # which handles CoW by setting the refs manually if necessary
        self.blocks[0].values = values
        self.blocks[0]._mgr_locs = BlockPlacement(slice(len(values)))

    def _equal_values(self, other: Self) -> bool:
        """
        Used in .equals defined in base class. Only check the column values
        assuming shape and indexes have already been checked.
        """
        # For SingleBlockManager (i.e. Series)
        if other.ndim != 1:
            return False
        left = self.blocks[0].values
        right = other.blocks[0].values
        return array_equals(left, right)
+
|
| 2091 |
+
# --------------------------------------------------------------------
|
| 2092 |
+
# Constructor Helpers
|
| 2093 |
+
|
| 2094 |
+
|
| 2095 |
+
def create_block_manager_from_blocks(
    blocks: list[Block],
    axes: list[Index],
    consolidate: bool = True,
    verify_integrity: bool = True,
) -> BlockManager:
    """Construct a BlockManager from already-formed Blocks, optionally consolidating."""
    # If verify_integrity=False, then caller is responsible for checking
    #  all(x.shape[-1] == len(axes[1]) for x in blocks)
    #  sum(x.shape[0] for x in blocks) == len(axes[0])
    #  set(x for blk in blocks for x in blk.mgr_locs) == set(range(len(axes[0])))
    #  all(blk.ndim == 2 for blk in blocks)
    # This allows us to safely pass verify_integrity=False

    try:
        mgr = BlockManager(blocks, axes, verify_integrity=verify_integrity)

    except ValueError as err:
        # Re-raise with a friendlier message describing the shape mismatch.
        arrays = [blk.values for blk in blocks]
        tot_items = sum(arr.shape[0] for arr in arrays)
        raise_construction_error(tot_items, arrays[0].shape[1:], axes, err)

    if consolidate:
        mgr._consolidate_inplace()
    return mgr
|
| 2121 |
+
def create_block_manager_from_column_arrays(
    arrays: list[ArrayLike],
    axes: list[Index],
    consolidate: bool,
    refs: list,
) -> BlockManager:
    """Construct a BlockManager from one 1D array per column."""
    # Assertions disabled for performance (caller is responsible for verifying)
    #  assert isinstance(axes, list)
    #  assert all(isinstance(x, Index) for x in axes)
    #  assert all(isinstance(x, (np.ndarray, ExtensionArray)) for x in arrays)
    #  assert all(type(x) is not NumpyExtensionArray for x in arrays)
    #  assert all(x.ndim == 1 for x in arrays)
    #  assert all(len(x) == len(axes[1]) for x in arrays)
    #  assert len(arrays) == len(axes[0])
    # These last three are sufficient to allow us to safely pass
    # verify_integrity=False below.

    try:
        blocks = _form_blocks(arrays, consolidate, refs)
        mgr = BlockManager(blocks, axes, verify_integrity=False)
    except ValueError as e:
        # Re-raise with a friendlier message describing the shape mismatch.
        raise_construction_error(len(arrays), arrays[0].shape, axes, e)
    if consolidate:
        mgr._consolidate_inplace()
    return mgr
|
| 2148 |
+
def raise_construction_error(
    tot_items: int,
    block_shape: Shape,
    axes: list[Index],
    e: ValueError | None = None,
):
    """
    Raise a helpful ValueError describing a construction shape mismatch.

    Parameters
    ----------
    tot_items : int
        Total number of items (columns) passed.
    block_shape : Shape
        Trailing shape of the passed blocks.
    axes : list[Index]
        The axes the caller tried to construct with.
    e : ValueError, optional
        Original error; re-raised verbatim when the shapes actually agree.
    """
    passed = tuple(int(dim) for dim in (tot_items, *block_shape))
    # Present shapes in the user-facing (rows, columns) orientation for 1D/2D.
    if len(passed) <= 2:
        passed = passed[::-1]

    implied = tuple(map(len, axes))
    if len(implied) <= 2:
        implied = implied[::-1]

    # If the shapes match, the original error was about something else,
    # so surface it unchanged.
    if e is not None and passed == implied:
        raise e
    if block_shape[0] == 0:
        raise ValueError("Empty data passed with indices specified.")
    raise ValueError(f"Shape of passed values is {passed}, indices imply {implied}")
|
| 2174 |
+
# -----------------------------------------------------------------------
|
| 2175 |
+
|
| 2176 |
+
|
| 2177 |
+
def _grouping_func(tup: tuple[int, ArrayLike]) -> tuple[int, DtypeObj]:
    """Key function for grouping (position, array) tuples by dtype in _form_blocks."""
    dtype = tup[1].dtype

    if is_1d_only_ea_dtype(dtype):
        # We know these won't be consolidated, so don't need to group these.
        # This avoids expensive comparisons of CategoricalDtype objects
        sep = id(dtype)
    else:
        sep = 0

    return sep, dtype
def _form_blocks(arrays: list[ArrayLike], consolidate: bool, refs: list) -> list[Block]:
    """Build 2D Blocks from per-column 1D arrays, grouping by dtype if consolidating."""
    tuples = list(enumerate(arrays))

    if not consolidate:
        return _tuples_to_blocks_no_consolidate(tuples, refs)

    # when consolidating, we can ignore refs (either stacking always copies,
    # or the EA is already copied in the calling dict_to_mgr)

    # group by dtype
    grouper = itertools.groupby(tuples, _grouping_func)

    nbs: list[Block] = []
    for (_, dtype), tup_block in grouper:
        block_type = get_block_type(dtype)

        if isinstance(dtype, np.dtype):
            is_dtlike = dtype.kind in "mM"

            if issubclass(dtype.type, (str, bytes)):
                # str/bytes are stored as object arrays
                dtype = np.dtype(object)

            values, placement = _stack_arrays(list(tup_block), dtype)
            if is_dtlike:
                values = ensure_wrapped_if_datetimelike(values)
            blk = block_type(values, placement=BlockPlacement(placement), ndim=2)
            nbs.append(blk)

        elif is_1d_only_ea_dtype(dtype):
            # 1D-only extension arrays cannot be stacked: one block per column.
            dtype_blocks = [
                block_type(x[1], placement=BlockPlacement(x[0]), ndim=2)
                for x in tup_block
            ]
            nbs.extend(dtype_blocks)

        else:
            # 2D-capable extension arrays: reshape each column to 2D.
            dtype_blocks = [
                block_type(
                    ensure_block_shape(x[1], 2), placement=BlockPlacement(x[0]), ndim=2
                )
                for x in tup_block
            ]
            nbs.extend(dtype_blocks)
    return nbs
|
| 2236 |
+
def _tuples_to_blocks_no_consolidate(tuples, refs) -> list[Block]:
    """Create one 2D block per (placement, array) tuple, without consolidating."""
    # tuples produced within _form_blocks are of the form (placement, array)
    blocks: list[Block] = []
    for (loc, arr), ref in zip(tuples, refs):
        values = ensure_block_shape(arr, ndim=2)
        blocks.append(new_block_2d(values, placement=BlockPlacement(loc), refs=ref))
    return blocks
|
| 2246 |
+
def _stack_arrays(tuples, dtype: np.dtype):
|
| 2247 |
+
placement, arrays = zip(*tuples)
|
| 2248 |
+
|
| 2249 |
+
first = arrays[0]
|
| 2250 |
+
shape = (len(arrays),) + first.shape
|
| 2251 |
+
|
| 2252 |
+
stacked = np.empty(shape, dtype=dtype)
|
| 2253 |
+
for i, arr in enumerate(arrays):
|
| 2254 |
+
stacked[i] = arr
|
| 2255 |
+
|
| 2256 |
+
return stacked, placement
|
| 2257 |
+
|
| 2258 |
+
|
| 2259 |
+
def _consolidate(blocks: tuple[Block, ...]) -> tuple[Block, ...]:
    """
    Merge blocks having same dtype, exclude non-consolidating blocks
    """

    # Group by (_can_consolidate, dtype); groupby requires sorted input.
    def gkey(block):
        return block._consolidate_key

    merged: list[Block] = []
    for (can_consolidate, dtype), group in itertools.groupby(
        sorted(blocks, key=gkey), gkey
    ):
        nbs, _ = _merge_blocks(
            list(group), dtype=dtype, can_consolidate=can_consolidate
        )
        merged = extend_blocks(nbs, merged)
    return tuple(merged)
|
| 2276 |
+
def _merge_blocks(
    blocks: list[Block], dtype: DtypeObj, can_consolidate: bool
) -> tuple[list[Block], bool]:
    """Merge same-dtype blocks into one; returns (blocks, whether a merge happened)."""
    if len(blocks) == 1:
        # Nothing to merge.
        return blocks, False

    if can_consolidate:
        # TODO: optimization potential in case all mgrs contain slices and
        # combination of those slices is a slice, too.
        new_mgr_locs = np.concatenate([b.mgr_locs.as_array for b in blocks])

        new_values: ArrayLike

        if isinstance(blocks[0].dtype, np.dtype):
            # error: List comprehension has incompatible type List[Union[ndarray,
            # ExtensionArray]]; expected List[Union[complex, generic,
            # Sequence[Union[int, float, complex, str, bytes, generic]],
            # Sequence[Sequence[Any]], SupportsArray]]
            new_values = np.vstack([b.values for b in blocks])  # type: ignore[misc]
        else:
            bvals = [blk.values for blk in blocks]
            bvals2 = cast(Sequence[NDArrayBackedExtensionArray], bvals)
            new_values = bvals2[0]._concat_same_type(bvals2, axis=0)

        # Put rows back into manager-location order.
        argsort = np.argsort(new_mgr_locs)
        new_values = new_values[argsort]
        new_mgr_locs = new_mgr_locs[argsort]

        bp = BlockPlacement(new_mgr_locs)
        return [new_block_2d(new_values, placement=bp)], True

    # can't consolidate --> no merge
    return blocks, False
|
| 2311 |
+
def _fast_count_smallints(arr: npt.NDArray[np.intp]):
|
| 2312 |
+
"""Faster version of set(arr) for sequences of small numbers."""
|
| 2313 |
+
counts = np.bincount(arr)
|
| 2314 |
+
nz = counts.nonzero()[0]
|
| 2315 |
+
# Note: list(zip(...) outperforms list(np.c_[nz, counts[nz]]) here,
|
| 2316 |
+
# in one benchmark by a factor of 11
|
| 2317 |
+
return zip(nz, counts[nz])
|
| 2318 |
+
|
| 2319 |
+
|
| 2320 |
+
def _preprocess_slice_or_indexer(
|
| 2321 |
+
slice_or_indexer: slice | np.ndarray, length: int, allow_fill: bool
|
| 2322 |
+
):
|
| 2323 |
+
if isinstance(slice_or_indexer, slice):
|
| 2324 |
+
return (
|
| 2325 |
+
"slice",
|
| 2326 |
+
slice_or_indexer,
|
| 2327 |
+
libinternals.slice_len(slice_or_indexer, length),
|
| 2328 |
+
)
|
| 2329 |
+
else:
|
| 2330 |
+
if (
|
| 2331 |
+
not isinstance(slice_or_indexer, np.ndarray)
|
| 2332 |
+
or slice_or_indexer.dtype.kind != "i"
|
| 2333 |
+
):
|
| 2334 |
+
dtype = getattr(slice_or_indexer, "dtype", None)
|
| 2335 |
+
raise TypeError(type(slice_or_indexer), dtype)
|
| 2336 |
+
|
| 2337 |
+
indexer = ensure_platform_int(slice_or_indexer)
|
| 2338 |
+
if not allow_fill:
|
| 2339 |
+
indexer = maybe_convert_indices(indexer, length)
|
| 2340 |
+
return "fancy", indexer, len(indexer)
|
| 2341 |
+
|
| 2342 |
+
|
| 2343 |
+
def make_na_array(dtype: DtypeObj, shape: Shape, fill_value) -> ArrayLike:
    """
    Build an array of the given ``shape`` and ``dtype`` filled entirely
    with ``fill_value``.

    Parameters
    ----------
    dtype : DtypeObj
        Target dtype of the array to construct.
    shape : Shape
        Desired shape; for 1d-only EA dtypes this must be ``(1, nrows)``.
    fill_value : Any
        Value assigned to every element (typically a missing-value
        marker compatible with ``dtype``).

    Returns
    -------
    ArrayLike
    """
    if isinstance(dtype, DatetimeTZDtype):
        # NB: exclude e.g. pyarrow[dt64tz] dtypes
        # Convert the fill value to the dtype's unit and build the i8
        # backing array directly, then reinterpret it as datetime64.
        ts = Timestamp(fill_value).as_unit(dtype.unit)
        i8values = np.full(shape, ts._value)
        dt64values = i8values.view(f"M8[{dtype.unit}]")
        return DatetimeArray._simple_new(dt64values, dtype=dtype)

    elif is_1d_only_ea_dtype(dtype):
        dtype = cast(ExtensionDtype, dtype)
        cls = dtype.construct_array_type()

        # take() with allow_fill=True on an all -1 indexer broadcasts
        # fill_value into every position of a length-nrows result.
        missing_arr = cls._from_sequence([], dtype=dtype)
        ncols, nrows = shape
        assert ncols == 1, ncols
        empty_arr = -1 * np.ones((nrows,), dtype=np.intp)
        return missing_arr.take(empty_arr, allow_fill=True, fill_value=fill_value)
    elif isinstance(dtype, ExtensionDtype):
        # TODO: no tests get here, a handful would if we disabled
        # the dt64tz special-case above (which is faster)
        cls = dtype.construct_array_type()
        missing_arr = cls._empty(shape=shape, dtype=dtype)
        missing_arr[:] = fill_value
        return missing_arr
    else:
        # NB: we should never get here with dtype integer or bool;
        # if we did, the missing_arr.fill would cast to gibberish
        missing_arr = np.empty(shape, dtype=dtype)
        missing_arr.fill(fill_value)

        if dtype.kind in "mM":
            # wrap raw datetime64/timedelta64 ndarrays in the matching
            # pandas array class
            missing_arr = ensure_wrapped_if_datetimelike(missing_arr)
        return missing_arr
|
videollama2/lib/python3.10/site-packages/pandas/core/internals/ops.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
NamedTuple,
|
| 6 |
+
)
|
| 7 |
+
|
| 8 |
+
from pandas.core.dtypes.common import is_1d_only_ea_dtype
|
| 9 |
+
|
| 10 |
+
if TYPE_CHECKING:
|
| 11 |
+
from collections.abc import Iterator
|
| 12 |
+
|
| 13 |
+
from pandas._libs.internals import BlockPlacement
|
| 14 |
+
from pandas._typing import ArrayLike
|
| 15 |
+
|
| 16 |
+
from pandas.core.internals.blocks import Block
|
| 17 |
+
from pandas.core.internals.managers import BlockManager
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class BlockPairInfo(NamedTuple):
    """
    Aligned left/right values for one (left-block, right-block) pair,
    as produced by ``_iter_block_pairs``.
    """

    # values of the left block, sliced/squeezed to match rvals
    lvals: ArrayLike
    # values of the right block
    rvals: ArrayLike
    # placement of the left block within its manager
    locs: BlockPlacement
    # True when the left block's values are 1-dimensional (EA-backed)
    left_ea: bool
    # True when the right block's values are 1-dimensional (EA-backed)
    right_ea: bool
    # the right block itself
    rblk: Block
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _iter_block_pairs(
    left: BlockManager, right: BlockManager
) -> Iterator[BlockPairInfo]:
    """
    Yield a BlockPairInfo for every aligned (left-block, right-block) pair.

    At this point the parent DataFrames have already been checked:
    assert rframe._indexed_same(lframe)
    """
    for lblk in left.blocks:
        placement = lblk.mgr_locs
        # 1-dim values means the block is ExtensionArray-backed
        left_is_ea = lblk.values.ndim == 1

        matching = right._slice_take_blocks_ax0(placement.indexer, only_slice=True)

        # Assertions are disabled for performance, but should hold:
        # if left_is_ea:
        #    assert len(placement) == 1, placement
        #    assert len(matching) == 1, matching
        #    assert matching[0].shape[0] == 1, matching[0].shape

        for rblk in matching:
            right_is_ea = rblk.values.ndim == 1
            lvals, rvals = _get_same_shape_values(lblk, rblk, left_is_ea, right_is_ea)
            yield BlockPairInfo(lvals, rvals, placement, left_is_ea, right_is_ea, rblk)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def operate_blockwise(
    left: BlockManager, right: BlockManager, array_op
) -> BlockManager:
    """
    Apply ``array_op`` blockwise to two aligned BlockManagers and return
    a new manager of the same type as ``right`` holding the results.

    At this point the parent DataFrames have already been checked:
    assert rframe._indexed_same(lframe)
    """
    result_blocks: list[Block] = []
    for info in _iter_block_pairs(left, right):
        out = array_op(info.lvals, info.rvals)
        # Restore a (1, n) shape when the left side was 1D but the right
        # was 2D, unless the result dtype only supports 1D storage.
        needs_2d = (
            info.left_ea
            and not info.right_ea
            and hasattr(out, "reshape")
            and not is_1d_only_ea_dtype(out.dtype)
        )
        if needs_2d:
            out = out.reshape(1, -1)
        new_blocks = info.rblk._split_op_result(out)

        # Assertions are disabled for performance, but should hold:
        # if info.right_ea or info.left_ea:
        #    assert len(new_blocks) == 1
        # else:
        #    assert out.shape == info.lvals.shape, (out.shape, info.lvals.shape)

        _reset_block_mgr_locs(new_blocks, info.locs)
        result_blocks.extend(new_blocks)

    # Assertions are disabled for performance, but should hold:
    #  slocs = {y for nb in res_blks for y in nb.mgr_locs.as_array}
    #  nlocs = sum(len(nb.mgr_locs.as_array) for nb in res_blks)
    #  assert nlocs == len(left.items), (nlocs, len(left.items))
    #  assert len(slocs) == nlocs, (len(slocs), nlocs)
    #  assert slocs == set(range(nlocs)), slocs

    return type(right)(tuple(result_blocks), axes=right.axes, verify_integrity=False)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def _reset_block_mgr_locs(nbs: list[Block], locs) -> None:
|
| 97 |
+
"""
|
| 98 |
+
Reset mgr_locs to correspond to our original DataFrame.
|
| 99 |
+
"""
|
| 100 |
+
for nb in nbs:
|
| 101 |
+
nblocs = locs[nb.mgr_locs.indexer]
|
| 102 |
+
nb.mgr_locs = nblocs
|
| 103 |
+
# Assertions are disabled for performance, but should hold:
|
| 104 |
+
# assert len(nblocs) == nb.shape[0], (len(nblocs), nb.shape)
|
| 105 |
+
# assert all(x in locs.as_array for x in nb.mgr_locs.as_array)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def _get_same_shape_values(
    lblk: Block, rblk: Block, left_ea: bool, right_ea: bool
) -> tuple[ArrayLike, ArrayLike]:
    """
    Slice lblk.values to align with rblk. Squeeze if we have EAs.

    Parameters
    ----------
    lblk, rblk : Block
    left_ea, right_ea : bool
        Whether the corresponding block's values are 1-dimensional
        (ExtensionArray-backed).

    Returns
    -------
    tuple of (lvals, rvals) with compatible shapes for array ops.
    """
    lvals = lblk.values
    rvals = rblk.values

    # Require that the indexing into lvals be slice-like
    assert rblk.mgr_locs.is_slice_like, rblk.mgr_locs

    # TODO(EA2D): with 2D EAs only this first clause would be needed
    if not (left_ea or right_ea):
        # both 2D: slice lvals down to the rows that rblk covers
        # error: No overload variant of "__getitem__" of "ExtensionArray" matches
        # argument type "Tuple[Union[ndarray, slice], slice]"
        lvals = lvals[rblk.mgr_locs.indexer, :]  # type: ignore[call-overload]
        assert lvals.shape == rvals.shape, (lvals.shape, rvals.shape)
    elif left_ea and right_ea:
        # both 1D: shapes must already agree, nothing to slice
        assert lvals.shape == rvals.shape, (lvals.shape, rvals.shape)
    elif right_ea:
        # lvals are 2D, rvals are 1D

        # error: No overload variant of "__getitem__" of "ExtensionArray" matches
        # argument type "Tuple[Union[ndarray, slice], slice]"
        lvals = lvals[rblk.mgr_locs.indexer, :]  # type: ignore[call-overload]
        assert lvals.shape[0] == 1, lvals.shape
        # squeeze the single row down to 1D to match rvals
        lvals = lvals[0, :]
    else:
        # lvals are 1D, rvals are 2D
        assert rvals.shape[0] == 1, rvals.shape
        # error: No overload variant of "__getitem__" of "ExtensionArray" matches
        # argument type "Tuple[int, slice]"
        rvals = rvals[0, :]  # type: ignore[call-overload]

    return lvals, rvals
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def blockwise_all(left: BlockManager, right: BlockManager, op) -> bool:
    """
    Blockwise `all` reduction: True iff ``op`` is truthy for every
    aligned block pair of ``left`` and ``right``.

    Short-circuits on the first falsy result.
    """
    return all(
        op(pair.lvals, pair.rvals) for pair in _iter_block_pairs(left, right)
    )
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/ES2_compatibility.cpython-310.pyc
ADDED
|
Binary file (2.27 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/arrays_of_arrays.cpython-310.pyc
ADDED
|
Binary file (735 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/clear_buffer_object.cpython-310.pyc
ADDED
|
Binary file (1.17 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/clear_texture.cpython-310.pyc
ADDED
|
Binary file (1.24 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/compressed_texture_pixel_storage.cpython-310.pyc
ADDED
|
Binary file (1.21 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/compute_shader.cpython-310.pyc
ADDED
|
Binary file (2.1 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/draw_instanced.cpython-310.pyc
ADDED
|
Binary file (1.14 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/fragment_layer_viewport.cpython-310.pyc
ADDED
|
Binary file (749 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/fragment_program_shadow.cpython-310.pyc
ADDED
|
Binary file (749 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/geometry_shader4.cpython-310.pyc
ADDED
|
Binary file (2.6 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/gpu_shader_fp64.cpython-310.pyc
ADDED
|
Binary file (3.92 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/OpenGL/raw/GL/ARB/__pycache__/multi_bind.cpython-310.pyc
ADDED
|
Binary file (1.81 kB). View file
|
|
|