Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
See raw diff
- lib/python3.10/site-packages/astunparse/__init__.py +20 -0
- lib/python3.10/site-packages/astunparse/__main__.py +48 -0
- lib/python3.10/site-packages/astunparse/printer.py +51 -0
- lib/python3.10/site-packages/astunparse/unparser.py +906 -0
- lib/python3.10/site-packages/audioread-3.0.1.dist-info/LICENSE +19 -0
- lib/python3.10/site-packages/audioread-3.0.1.dist-info/METADATA +239 -0
- lib/python3.10/site-packages/audioread-3.0.1.dist-info/RECORD +15 -0
- lib/python3.10/site-packages/audioread-3.0.1.dist-info/REQUESTED +0 -0
- lib/python3.10/site-packages/audioread-3.0.1.dist-info/WHEEL +4 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/INSTALLER +1 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/LICENSE +202 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/METADATA +365 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/RECORD +131 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/REQUESTED +0 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/WHEEL +5 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/entry_points.txt +3 -0
- lib/python3.10/site-packages/datasets-2.14.4.dist-info/top_level.txt +1 -0
- lib/python3.10/site-packages/datasets/formatting/__init__.py +132 -0
- lib/python3.10/site-packages/datasets/formatting/formatting.py +649 -0
- lib/python3.10/site-packages/datasets/formatting/jax_formatter.py +160 -0
- lib/python3.10/site-packages/datasets/formatting/np_formatter.py +106 -0
- lib/python3.10/site-packages/datasets/formatting/tf_formatter.py +115 -0
- lib/python3.10/site-packages/datasets/formatting/torch_formatter.py +105 -0
- lib/python3.10/site-packages/datasets/utils/resources/__init__.py +0 -0
- lib/python3.10/site-packages/datasets/utils/resources/creators.json +17 -0
- lib/python3.10/site-packages/datasets/utils/resources/languages.json +0 -0
- lib/python3.10/site-packages/datasets/utils/resources/size_categories.json +14 -0
- lib/python3.10/site-packages/importlib_resources/compat/__init__.py +0 -0
- lib/python3.10/site-packages/importlib_resources/compat/py39.py +9 -0
- lib/python3.10/site-packages/importlib_resources/future/__init__.py +0 -0
- lib/python3.10/site-packages/importlib_resources/future/adapters.py +102 -0
- lib/python3.10/site-packages/importlib_resources/tests/__init__.py +0 -0
- lib/python3.10/site-packages/importlib_resources/tests/_path.py +90 -0
- lib/python3.10/site-packages/importlib_resources/tests/compat/__init__.py +0 -0
- lib/python3.10/site-packages/importlib_resources/tests/compat/py312.py +18 -0
- lib/python3.10/site-packages/importlib_resources/tests/compat/py39.py +13 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_compatibilty_files.py +103 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_custom.py +48 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_files.py +194 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_functional.py +267 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_path.py +63 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_read.py +94 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_reader.py +137 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_resource.py +238 -0
- lib/python3.10/site-packages/importlib_resources/tests/test_util.py +29 -0
- lib/python3.10/site-packages/importlib_resources/tests/zip.py +26 -0
- lib/python3.10/site-packages/pandas/__init__.py +367 -0
- lib/python3.10/site-packages/pandas/_config/__init__.py +57 -0
- lib/python3.10/site-packages/pandas/_config/config.py +948 -0
- lib/python3.10/site-packages/pandas/_config/dates.py +25 -0
lib/python3.10/site-packages/astunparse/__init__.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding: utf-8
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
from six.moves import cStringIO
|
| 4 |
+
from .unparser import Unparser
|
| 5 |
+
from .printer import Printer
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
__version__ = '1.6.3'
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def unparse(tree):
|
| 12 |
+
v = cStringIO()
|
| 13 |
+
Unparser(tree, file=v)
|
| 14 |
+
return v.getvalue()
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def dump(tree):
|
| 18 |
+
v = cStringIO()
|
| 19 |
+
Printer(file=v).visit(tree)
|
| 20 |
+
return v.getvalue()
|
lib/python3.10/site-packages/astunparse/__main__.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import print_function
|
| 2 |
+
import sys
|
| 3 |
+
import os
|
| 4 |
+
import argparse
|
| 5 |
+
from .unparser import roundtrip
|
| 6 |
+
from . import dump
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def roundtrip_recursive(target, dump_tree=False):
    """Round-trip *target*, a .py file or a directory containing them.

    For a file: print its path, an '=' underline, then either the
    pretty-printed AST (``dump_tree=True``) or the regenerated source.
    For a directory: recurse into each top-level ``.py`` entry.
    Anything else is skipped with a warning on stderr.
    """
    if os.path.isdir(target):
        for entry in os.listdir(target):
            if entry.endswith(".py"):
                roundtrip_recursive(os.path.join(target, entry), dump_tree)
    elif os.path.isfile(target):
        print(target)
        print("=" * len(target))
        # Select the renderer once, then call it.
        renderer = dump if dump_tree else roundtrip
        renderer(target)
        print()
    else:
        print(
            "WARNING: skipping '%s', not a file or directory" % target,
            file=sys.stderr
        )
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def main(args):
    """Command-line entry point.

    Parameters
    ----------
    args : list of str
        Argument vector (without the program name), e.g. ``sys.argv[1:]``.
    """
    parser = argparse.ArgumentParser(prog="astunparse")
    parser.add_argument(
        'target',
        nargs='+',
        help="Files or directories to show roundtripped source for"
    )
    parser.add_argument(
        '--dump',
        # BUG FIX: the original used ``type=bool``, an argparse trap --
        # bool() of any non-empty string (including "false" and "0") is
        # True, so every supplied value enabled dumping.  A boolean flag
        # should be declared with ``action='store_true'``.
        action='store_true',
        help="Show a pretty-printed AST instead of the source"
    )

    arguments = parser.parse_args(args)
    for target in arguments.target:
        roundtrip_recursive(target, dump_tree=arguments.dump)


if __name__ == "__main__":
    main(sys.argv[1:])
|
lib/python3.10/site-packages/astunparse/printer.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import unicode_literals
|
| 2 |
+
import sys
|
| 3 |
+
import ast
|
| 4 |
+
import six
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class Printer(ast.NodeVisitor):
    """Render an AST as a readable, indented constructor-style dump.

    Output looks like ``Module(\\n body=[...])`` and is written
    incrementally to *file* as nodes are visited.
    """

    def __init__(self, file=sys.stdout, indent=" "):
        self.indentation = 0       # current nesting depth
        self.indent_with = indent  # string repeated once per depth level
        self.f = file              # output stream (any .write()-able)

    # overridden to make the API obvious
    def visit(self, node):
        super(Printer, self).visit(node)

    def write(self, text):
        # str() instead of six.text_type(): they are the same type on
        # Python 3, and this removes the class's only use of the
        # third-party `six` shim.
        self.f.write(str(text))

    def generic_visit(self, node):
        """Write *node* -- an AST node or a list -- recursively.

        Nodes print as ``TypeName(field=..., ...)``, lists as
        ``[...]``; multi-child containers get one child per line at
        the current indentation.
        """
        if isinstance(node, list):
            nodestart = "["
            nodeend = "]"
            children = [("", child) for child in node]
        else:
            nodestart = type(node).__name__ + "("
            nodeend = ")"
            children = [(name + "=", value) for name, value in ast.iter_fields(node)]

        # Only break onto multiple lines when there is more than one child.
        if len(children) > 1:
            self.indentation += 1

        self.write(nodestart)
        for i, pair in enumerate(children):
            attr, child = pair
            if len(children) > 1:
                self.write("\n" + self.indent_with * self.indentation)
            if isinstance(child, (ast.AST, list)):
                self.write(attr)
                self.visit(child)
            else:
                # Leaf values (ints, strings, None, ...) print via repr.
                self.write(attr + repr(child))

            if i != len(children) - 1:
                self.write(",")
        self.write(nodeend)

        if len(children) > 1:
            self.indentation -= 1
|
lib/python3.10/site-packages/astunparse/unparser.py
ADDED
|
@@ -0,0 +1,906 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Usage: unparse.py <path to source file>"
|
| 2 |
+
from __future__ import print_function, unicode_literals
|
| 3 |
+
import six
|
| 4 |
+
import sys
|
| 5 |
+
import ast
|
| 6 |
+
import os
|
| 7 |
+
import tokenize
|
| 8 |
+
from six import StringIO
|
| 9 |
+
|
| 10 |
+
# Large float and imaginary literals get turned into infinities in the AST.
|
| 11 |
+
# We unparse those infinities to INFSTR.
|
| 12 |
+
INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
|
| 13 |
+
|
| 14 |
+
def interleave(inter, f, seq):
    """Apply *f* to every item of *seq*, calling *inter()* between items.

    Behaves like printing a separated list: an empty sequence produces
    no calls at all, and *inter* runs only between consecutive items,
    never before the first or after the last.
    """
    for index, item in enumerate(seq):
        if index:
            inter()
        f(item)
|
| 26 |
+
|
| 27 |
+
class Unparser:
|
| 28 |
+
"""Methods in this class recursively traverse an AST and
|
| 29 |
+
output source code for the abstract syntax; original formatting
|
| 30 |
+
is disregarded. """
|
| 31 |
+
|
| 32 |
+
def __init__(self, tree, file = sys.stdout):
|
| 33 |
+
"""Unparser(tree, file=sys.stdout) -> None.
|
| 34 |
+
Print the source for tree to file."""
|
| 35 |
+
self.f = file
|
| 36 |
+
self.future_imports = []
|
| 37 |
+
self._indent = 0
|
| 38 |
+
self.dispatch(tree)
|
| 39 |
+
print("", file=self.f)
|
| 40 |
+
self.f.flush()
|
| 41 |
+
|
| 42 |
+
def fill(self, text = ""):
|
| 43 |
+
"Indent a piece of text, according to the current indentation level"
|
| 44 |
+
self.f.write("\n"+" "*self._indent + text)
|
| 45 |
+
|
| 46 |
+
def write(self, text):
|
| 47 |
+
"Append a piece of text to the current line."
|
| 48 |
+
self.f.write(six.text_type(text))
|
| 49 |
+
|
| 50 |
+
def enter(self):
|
| 51 |
+
"Print ':', and increase the indentation."
|
| 52 |
+
self.write(":")
|
| 53 |
+
self._indent += 1
|
| 54 |
+
|
| 55 |
+
def leave(self):
|
| 56 |
+
"Decrease the indentation level."
|
| 57 |
+
self._indent -= 1
|
| 58 |
+
|
| 59 |
+
def dispatch(self, tree):
|
| 60 |
+
"Dispatcher function, dispatching tree type T to method _T."
|
| 61 |
+
if isinstance(tree, list):
|
| 62 |
+
for t in tree:
|
| 63 |
+
self.dispatch(t)
|
| 64 |
+
return
|
| 65 |
+
meth = getattr(self, "_"+tree.__class__.__name__)
|
| 66 |
+
meth(tree)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
############### Unparsing methods ######################
|
| 70 |
+
# There should be one method per concrete grammar type #
|
| 71 |
+
# Constructors should be grouped by sum type. Ideally, #
|
| 72 |
+
# this would follow the order in the grammar, but #
|
| 73 |
+
# currently doesn't. #
|
| 74 |
+
########################################################
|
| 75 |
+
|
| 76 |
+
def _Module(self, tree):
|
| 77 |
+
for stmt in tree.body:
|
| 78 |
+
self.dispatch(stmt)
|
| 79 |
+
|
| 80 |
+
def _Interactive(self, tree):
|
| 81 |
+
for stmt in tree.body:
|
| 82 |
+
self.dispatch(stmt)
|
| 83 |
+
|
| 84 |
+
def _Expression(self, tree):
|
| 85 |
+
self.dispatch(tree.body)
|
| 86 |
+
|
| 87 |
+
# stmt
|
| 88 |
+
def _Expr(self, tree):
|
| 89 |
+
self.fill()
|
| 90 |
+
self.dispatch(tree.value)
|
| 91 |
+
|
| 92 |
+
def _NamedExpr(self, tree):
|
| 93 |
+
self.write("(")
|
| 94 |
+
self.dispatch(tree.target)
|
| 95 |
+
self.write(" := ")
|
| 96 |
+
self.dispatch(tree.value)
|
| 97 |
+
self.write(")")
|
| 98 |
+
|
| 99 |
+
def _Import(self, t):
|
| 100 |
+
self.fill("import ")
|
| 101 |
+
interleave(lambda: self.write(", "), self.dispatch, t.names)
|
| 102 |
+
|
| 103 |
+
def _ImportFrom(self, t):
|
| 104 |
+
# A from __future__ import may affect unparsing, so record it.
|
| 105 |
+
if t.module and t.module == '__future__':
|
| 106 |
+
self.future_imports.extend(n.name for n in t.names)
|
| 107 |
+
|
| 108 |
+
self.fill("from ")
|
| 109 |
+
self.write("." * t.level)
|
| 110 |
+
if t.module:
|
| 111 |
+
self.write(t.module)
|
| 112 |
+
self.write(" import ")
|
| 113 |
+
interleave(lambda: self.write(", "), self.dispatch, t.names)
|
| 114 |
+
|
| 115 |
+
def _Assign(self, t):
|
| 116 |
+
self.fill()
|
| 117 |
+
for target in t.targets:
|
| 118 |
+
self.dispatch(target)
|
| 119 |
+
self.write(" = ")
|
| 120 |
+
self.dispatch(t.value)
|
| 121 |
+
|
| 122 |
+
def _AugAssign(self, t):
|
| 123 |
+
self.fill()
|
| 124 |
+
self.dispatch(t.target)
|
| 125 |
+
self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
|
| 126 |
+
self.dispatch(t.value)
|
| 127 |
+
|
| 128 |
+
def _AnnAssign(self, t):
|
| 129 |
+
self.fill()
|
| 130 |
+
if not t.simple and isinstance(t.target, ast.Name):
|
| 131 |
+
self.write('(')
|
| 132 |
+
self.dispatch(t.target)
|
| 133 |
+
if not t.simple and isinstance(t.target, ast.Name):
|
| 134 |
+
self.write(')')
|
| 135 |
+
self.write(": ")
|
| 136 |
+
self.dispatch(t.annotation)
|
| 137 |
+
if t.value:
|
| 138 |
+
self.write(" = ")
|
| 139 |
+
self.dispatch(t.value)
|
| 140 |
+
|
| 141 |
+
def _Return(self, t):
|
| 142 |
+
self.fill("return")
|
| 143 |
+
if t.value:
|
| 144 |
+
self.write(" ")
|
| 145 |
+
self.dispatch(t.value)
|
| 146 |
+
|
| 147 |
+
def _Pass(self, t):
|
| 148 |
+
self.fill("pass")
|
| 149 |
+
|
| 150 |
+
def _Break(self, t):
|
| 151 |
+
self.fill("break")
|
| 152 |
+
|
| 153 |
+
def _Continue(self, t):
|
| 154 |
+
self.fill("continue")
|
| 155 |
+
|
| 156 |
+
def _Delete(self, t):
|
| 157 |
+
self.fill("del ")
|
| 158 |
+
interleave(lambda: self.write(", "), self.dispatch, t.targets)
|
| 159 |
+
|
| 160 |
+
def _Assert(self, t):
|
| 161 |
+
self.fill("assert ")
|
| 162 |
+
self.dispatch(t.test)
|
| 163 |
+
if t.msg:
|
| 164 |
+
self.write(", ")
|
| 165 |
+
self.dispatch(t.msg)
|
| 166 |
+
|
| 167 |
+
def _Exec(self, t):
|
| 168 |
+
self.fill("exec ")
|
| 169 |
+
self.dispatch(t.body)
|
| 170 |
+
if t.globals:
|
| 171 |
+
self.write(" in ")
|
| 172 |
+
self.dispatch(t.globals)
|
| 173 |
+
if t.locals:
|
| 174 |
+
self.write(", ")
|
| 175 |
+
self.dispatch(t.locals)
|
| 176 |
+
|
| 177 |
+
def _Print(self, t):
|
| 178 |
+
self.fill("print ")
|
| 179 |
+
do_comma = False
|
| 180 |
+
if t.dest:
|
| 181 |
+
self.write(">>")
|
| 182 |
+
self.dispatch(t.dest)
|
| 183 |
+
do_comma = True
|
| 184 |
+
for e in t.values:
|
| 185 |
+
if do_comma:self.write(", ")
|
| 186 |
+
else:do_comma=True
|
| 187 |
+
self.dispatch(e)
|
| 188 |
+
if not t.nl:
|
| 189 |
+
self.write(",")
|
| 190 |
+
|
| 191 |
+
def _Global(self, t):
|
| 192 |
+
self.fill("global ")
|
| 193 |
+
interleave(lambda: self.write(", "), self.write, t.names)
|
| 194 |
+
|
| 195 |
+
def _Nonlocal(self, t):
|
| 196 |
+
self.fill("nonlocal ")
|
| 197 |
+
interleave(lambda: self.write(", "), self.write, t.names)
|
| 198 |
+
|
| 199 |
+
def _Await(self, t):
|
| 200 |
+
self.write("(")
|
| 201 |
+
self.write("await")
|
| 202 |
+
if t.value:
|
| 203 |
+
self.write(" ")
|
| 204 |
+
self.dispatch(t.value)
|
| 205 |
+
self.write(")")
|
| 206 |
+
|
| 207 |
+
def _Yield(self, t):
|
| 208 |
+
self.write("(")
|
| 209 |
+
self.write("yield")
|
| 210 |
+
if t.value:
|
| 211 |
+
self.write(" ")
|
| 212 |
+
self.dispatch(t.value)
|
| 213 |
+
self.write(")")
|
| 214 |
+
|
| 215 |
+
def _YieldFrom(self, t):
|
| 216 |
+
self.write("(")
|
| 217 |
+
self.write("yield from")
|
| 218 |
+
if t.value:
|
| 219 |
+
self.write(" ")
|
| 220 |
+
self.dispatch(t.value)
|
| 221 |
+
self.write(")")
|
| 222 |
+
|
| 223 |
+
def _Raise(self, t):
|
| 224 |
+
self.fill("raise")
|
| 225 |
+
if six.PY3:
|
| 226 |
+
if not t.exc:
|
| 227 |
+
assert not t.cause
|
| 228 |
+
return
|
| 229 |
+
self.write(" ")
|
| 230 |
+
self.dispatch(t.exc)
|
| 231 |
+
if t.cause:
|
| 232 |
+
self.write(" from ")
|
| 233 |
+
self.dispatch(t.cause)
|
| 234 |
+
else:
|
| 235 |
+
self.write(" ")
|
| 236 |
+
if t.type:
|
| 237 |
+
self.dispatch(t.type)
|
| 238 |
+
if t.inst:
|
| 239 |
+
self.write(", ")
|
| 240 |
+
self.dispatch(t.inst)
|
| 241 |
+
if t.tback:
|
| 242 |
+
self.write(", ")
|
| 243 |
+
self.dispatch(t.tback)
|
| 244 |
+
|
| 245 |
+
def _Try(self, t):
|
| 246 |
+
self.fill("try")
|
| 247 |
+
self.enter()
|
| 248 |
+
self.dispatch(t.body)
|
| 249 |
+
self.leave()
|
| 250 |
+
for ex in t.handlers:
|
| 251 |
+
self.dispatch(ex)
|
| 252 |
+
if t.orelse:
|
| 253 |
+
self.fill("else")
|
| 254 |
+
self.enter()
|
| 255 |
+
self.dispatch(t.orelse)
|
| 256 |
+
self.leave()
|
| 257 |
+
if t.finalbody:
|
| 258 |
+
self.fill("finally")
|
| 259 |
+
self.enter()
|
| 260 |
+
self.dispatch(t.finalbody)
|
| 261 |
+
self.leave()
|
| 262 |
+
|
| 263 |
+
def _TryExcept(self, t):
|
| 264 |
+
self.fill("try")
|
| 265 |
+
self.enter()
|
| 266 |
+
self.dispatch(t.body)
|
| 267 |
+
self.leave()
|
| 268 |
+
|
| 269 |
+
for ex in t.handlers:
|
| 270 |
+
self.dispatch(ex)
|
| 271 |
+
if t.orelse:
|
| 272 |
+
self.fill("else")
|
| 273 |
+
self.enter()
|
| 274 |
+
self.dispatch(t.orelse)
|
| 275 |
+
self.leave()
|
| 276 |
+
|
| 277 |
+
def _TryFinally(self, t):
|
| 278 |
+
if len(t.body) == 1 and isinstance(t.body[0], ast.TryExcept):
|
| 279 |
+
# try-except-finally
|
| 280 |
+
self.dispatch(t.body)
|
| 281 |
+
else:
|
| 282 |
+
self.fill("try")
|
| 283 |
+
self.enter()
|
| 284 |
+
self.dispatch(t.body)
|
| 285 |
+
self.leave()
|
| 286 |
+
|
| 287 |
+
self.fill("finally")
|
| 288 |
+
self.enter()
|
| 289 |
+
self.dispatch(t.finalbody)
|
| 290 |
+
self.leave()
|
| 291 |
+
|
| 292 |
+
def _ExceptHandler(self, t):
|
| 293 |
+
self.fill("except")
|
| 294 |
+
if t.type:
|
| 295 |
+
self.write(" ")
|
| 296 |
+
self.dispatch(t.type)
|
| 297 |
+
if t.name:
|
| 298 |
+
self.write(" as ")
|
| 299 |
+
if six.PY3:
|
| 300 |
+
self.write(t.name)
|
| 301 |
+
else:
|
| 302 |
+
self.dispatch(t.name)
|
| 303 |
+
self.enter()
|
| 304 |
+
self.dispatch(t.body)
|
| 305 |
+
self.leave()
|
| 306 |
+
|
| 307 |
+
def _ClassDef(self, t):
|
| 308 |
+
self.write("\n")
|
| 309 |
+
for deco in t.decorator_list:
|
| 310 |
+
self.fill("@")
|
| 311 |
+
self.dispatch(deco)
|
| 312 |
+
self.fill("class "+t.name)
|
| 313 |
+
if six.PY3:
|
| 314 |
+
self.write("(")
|
| 315 |
+
comma = False
|
| 316 |
+
for e in t.bases:
|
| 317 |
+
if comma: self.write(", ")
|
| 318 |
+
else: comma = True
|
| 319 |
+
self.dispatch(e)
|
| 320 |
+
for e in t.keywords:
|
| 321 |
+
if comma: self.write(", ")
|
| 322 |
+
else: comma = True
|
| 323 |
+
self.dispatch(e)
|
| 324 |
+
if sys.version_info[:2] < (3, 5):
|
| 325 |
+
if t.starargs:
|
| 326 |
+
if comma: self.write(", ")
|
| 327 |
+
else: comma = True
|
| 328 |
+
self.write("*")
|
| 329 |
+
self.dispatch(t.starargs)
|
| 330 |
+
if t.kwargs:
|
| 331 |
+
if comma: self.write(", ")
|
| 332 |
+
else: comma = True
|
| 333 |
+
self.write("**")
|
| 334 |
+
self.dispatch(t.kwargs)
|
| 335 |
+
self.write(")")
|
| 336 |
+
elif t.bases:
|
| 337 |
+
self.write("(")
|
| 338 |
+
for a in t.bases:
|
| 339 |
+
self.dispatch(a)
|
| 340 |
+
self.write(", ")
|
| 341 |
+
self.write(")")
|
| 342 |
+
self.enter()
|
| 343 |
+
self.dispatch(t.body)
|
| 344 |
+
self.leave()
|
| 345 |
+
|
| 346 |
+
def _FunctionDef(self, t):
|
| 347 |
+
self.__FunctionDef_helper(t, "def")
|
| 348 |
+
|
| 349 |
+
def _AsyncFunctionDef(self, t):
|
| 350 |
+
self.__FunctionDef_helper(t, "async def")
|
| 351 |
+
|
| 352 |
+
def __FunctionDef_helper(self, t, fill_suffix):
|
| 353 |
+
self.write("\n")
|
| 354 |
+
for deco in t.decorator_list:
|
| 355 |
+
self.fill("@")
|
| 356 |
+
self.dispatch(deco)
|
| 357 |
+
def_str = fill_suffix+" "+t.name + "("
|
| 358 |
+
self.fill(def_str)
|
| 359 |
+
self.dispatch(t.args)
|
| 360 |
+
self.write(")")
|
| 361 |
+
if getattr(t, "returns", False):
|
| 362 |
+
self.write(" -> ")
|
| 363 |
+
self.dispatch(t.returns)
|
| 364 |
+
self.enter()
|
| 365 |
+
self.dispatch(t.body)
|
| 366 |
+
self.leave()
|
| 367 |
+
|
| 368 |
+
def _For(self, t):
|
| 369 |
+
self.__For_helper("for ", t)
|
| 370 |
+
|
| 371 |
+
def _AsyncFor(self, t):
|
| 372 |
+
self.__For_helper("async for ", t)
|
| 373 |
+
|
| 374 |
+
def __For_helper(self, fill, t):
|
| 375 |
+
self.fill(fill)
|
| 376 |
+
self.dispatch(t.target)
|
| 377 |
+
self.write(" in ")
|
| 378 |
+
self.dispatch(t.iter)
|
| 379 |
+
self.enter()
|
| 380 |
+
self.dispatch(t.body)
|
| 381 |
+
self.leave()
|
| 382 |
+
if t.orelse:
|
| 383 |
+
self.fill("else")
|
| 384 |
+
self.enter()
|
| 385 |
+
self.dispatch(t.orelse)
|
| 386 |
+
self.leave()
|
| 387 |
+
|
| 388 |
+
def _If(self, t):
|
| 389 |
+
self.fill("if ")
|
| 390 |
+
self.dispatch(t.test)
|
| 391 |
+
self.enter()
|
| 392 |
+
self.dispatch(t.body)
|
| 393 |
+
self.leave()
|
| 394 |
+
# collapse nested ifs into equivalent elifs.
|
| 395 |
+
while (t.orelse and len(t.orelse) == 1 and
|
| 396 |
+
isinstance(t.orelse[0], ast.If)):
|
| 397 |
+
t = t.orelse[0]
|
| 398 |
+
self.fill("elif ")
|
| 399 |
+
self.dispatch(t.test)
|
| 400 |
+
self.enter()
|
| 401 |
+
self.dispatch(t.body)
|
| 402 |
+
self.leave()
|
| 403 |
+
# final else
|
| 404 |
+
if t.orelse:
|
| 405 |
+
self.fill("else")
|
| 406 |
+
self.enter()
|
| 407 |
+
self.dispatch(t.orelse)
|
| 408 |
+
self.leave()
|
| 409 |
+
|
| 410 |
+
def _While(self, t):
|
| 411 |
+
self.fill("while ")
|
| 412 |
+
self.dispatch(t.test)
|
| 413 |
+
self.enter()
|
| 414 |
+
self.dispatch(t.body)
|
| 415 |
+
self.leave()
|
| 416 |
+
if t.orelse:
|
| 417 |
+
self.fill("else")
|
| 418 |
+
self.enter()
|
| 419 |
+
self.dispatch(t.orelse)
|
| 420 |
+
self.leave()
|
| 421 |
+
|
| 422 |
+
def _generic_With(self, t, async_=False):
|
| 423 |
+
self.fill("async with " if async_ else "with ")
|
| 424 |
+
if hasattr(t, 'items'):
|
| 425 |
+
interleave(lambda: self.write(", "), self.dispatch, t.items)
|
| 426 |
+
else:
|
| 427 |
+
self.dispatch(t.context_expr)
|
| 428 |
+
if t.optional_vars:
|
| 429 |
+
self.write(" as ")
|
| 430 |
+
self.dispatch(t.optional_vars)
|
| 431 |
+
self.enter()
|
| 432 |
+
self.dispatch(t.body)
|
| 433 |
+
self.leave()
|
| 434 |
+
|
| 435 |
+
def _With(self, t):
|
| 436 |
+
self._generic_With(t)
|
| 437 |
+
|
| 438 |
+
def _AsyncWith(self, t):
|
| 439 |
+
self._generic_With(t, async_=True)
|
| 440 |
+
|
| 441 |
+
# expr
|
| 442 |
+
def _Bytes(self, t):
|
| 443 |
+
self.write(repr(t.s))
|
| 444 |
+
|
| 445 |
+
def _Str(self, tree):
|
| 446 |
+
if six.PY3:
|
| 447 |
+
self.write(repr(tree.s))
|
| 448 |
+
else:
|
| 449 |
+
# if from __future__ import unicode_literals is in effect,
|
| 450 |
+
# then we want to output string literals using a 'b' prefix
|
| 451 |
+
# and unicode literals with no prefix.
|
| 452 |
+
if "unicode_literals" not in self.future_imports:
|
| 453 |
+
self.write(repr(tree.s))
|
| 454 |
+
elif isinstance(tree.s, str):
|
| 455 |
+
self.write("b" + repr(tree.s))
|
| 456 |
+
elif isinstance(tree.s, unicode):
|
| 457 |
+
self.write(repr(tree.s).lstrip("u"))
|
| 458 |
+
else:
|
| 459 |
+
assert False, "shouldn't get here"
|
| 460 |
+
|
| 461 |
+
def _JoinedStr(self, t):
|
| 462 |
+
# JoinedStr(expr* values)
|
| 463 |
+
self.write("f")
|
| 464 |
+
string = StringIO()
|
| 465 |
+
self._fstring_JoinedStr(t, string.write)
|
| 466 |
+
# Deviation from `unparse.py`: Try to find an unused quote.
|
| 467 |
+
# This change is made to handle _very_ complex f-strings.
|
| 468 |
+
v = string.getvalue()
|
| 469 |
+
if '\n' in v or '\r' in v:
|
| 470 |
+
quote_types = ["'''", '"""']
|
| 471 |
+
else:
|
| 472 |
+
quote_types = ["'", '"', '"""', "'''"]
|
| 473 |
+
for quote_type in quote_types:
|
| 474 |
+
if quote_type not in v:
|
| 475 |
+
v = "{quote_type}{v}{quote_type}".format(quote_type=quote_type, v=v)
|
| 476 |
+
break
|
| 477 |
+
else:
|
| 478 |
+
v = repr(v)
|
| 479 |
+
self.write(v)
|
| 480 |
+
|
| 481 |
+
def _FormattedValue(self, t):
|
| 482 |
+
# FormattedValue(expr value, int? conversion, expr? format_spec)
|
| 483 |
+
self.write("f")
|
| 484 |
+
string = StringIO()
|
| 485 |
+
self._fstring_JoinedStr(t, string.write)
|
| 486 |
+
self.write(repr(string.getvalue()))
|
| 487 |
+
|
| 488 |
+
def _fstring_JoinedStr(self, t, write):
|
| 489 |
+
for value in t.values:
|
| 490 |
+
meth = getattr(self, "_fstring_" + type(value).__name__)
|
| 491 |
+
meth(value, write)
|
| 492 |
+
|
| 493 |
+
def _fstring_Str(self, t, write):
|
| 494 |
+
value = t.s.replace("{", "{{").replace("}", "}}")
|
| 495 |
+
write(value)
|
| 496 |
+
|
| 497 |
+
def _fstring_Constant(self, t, write):
|
| 498 |
+
assert isinstance(t.value, str)
|
| 499 |
+
value = t.value.replace("{", "{{").replace("}", "}}")
|
| 500 |
+
write(value)
|
| 501 |
+
|
| 502 |
+
def _fstring_FormattedValue(self, t, write):
|
| 503 |
+
write("{")
|
| 504 |
+
expr = StringIO()
|
| 505 |
+
Unparser(t.value, expr)
|
| 506 |
+
expr = expr.getvalue().rstrip("\n")
|
| 507 |
+
if expr.startswith("{"):
|
| 508 |
+
write(" ") # Separate pair of opening brackets as "{ {"
|
| 509 |
+
write(expr)
|
| 510 |
+
if t.conversion != -1:
|
| 511 |
+
conversion = chr(t.conversion)
|
| 512 |
+
assert conversion in "sra"
|
| 513 |
+
write("!{conversion}".format(conversion=conversion))
|
| 514 |
+
if t.format_spec:
|
| 515 |
+
write(":")
|
| 516 |
+
meth = getattr(self, "_fstring_" + type(t.format_spec).__name__)
|
| 517 |
+
meth(t.format_spec, write)
|
| 518 |
+
write("}")
|
| 519 |
+
|
| 520 |
+
def _Name(self, t):
|
| 521 |
+
self.write(t.id)
|
| 522 |
+
|
| 523 |
+
def _NameConstant(self, t):
|
| 524 |
+
self.write(repr(t.value))
|
| 525 |
+
|
| 526 |
+
def _Repr(self, t):
|
| 527 |
+
self.write("`")
|
| 528 |
+
self.dispatch(t.value)
|
| 529 |
+
self.write("`")
|
| 530 |
+
|
| 531 |
+
def _write_constant(self, value):
|
| 532 |
+
if isinstance(value, (float, complex)):
|
| 533 |
+
# Substitute overflowing decimal literal for AST infinities.
|
| 534 |
+
self.write(repr(value).replace("inf", INFSTR))
|
| 535 |
+
else:
|
| 536 |
+
self.write(repr(value))
|
| 537 |
+
|
| 538 |
+
def _Constant(self, t):
|
| 539 |
+
value = t.value
|
| 540 |
+
if isinstance(value, tuple):
|
| 541 |
+
self.write("(")
|
| 542 |
+
if len(value) == 1:
|
| 543 |
+
self._write_constant(value[0])
|
| 544 |
+
self.write(",")
|
| 545 |
+
else:
|
| 546 |
+
interleave(lambda: self.write(", "), self._write_constant, value)
|
| 547 |
+
self.write(")")
|
| 548 |
+
elif value is Ellipsis: # instead of `...` for Py2 compatibility
|
| 549 |
+
self.write("...")
|
| 550 |
+
else:
|
| 551 |
+
if t.kind == "u":
|
| 552 |
+
self.write("u")
|
| 553 |
+
self._write_constant(t.value)
|
| 554 |
+
|
| 555 |
+
def _Num(self, t):
|
| 556 |
+
repr_n = repr(t.n)
|
| 557 |
+
if six.PY3:
|
| 558 |
+
self.write(repr_n.replace("inf", INFSTR))
|
| 559 |
+
else:
|
| 560 |
+
# Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
|
| 561 |
+
if repr_n.startswith("-"):
|
| 562 |
+
self.write("(")
|
| 563 |
+
if "inf" in repr_n and repr_n.endswith("*j"):
|
| 564 |
+
repr_n = repr_n.replace("*j", "j")
|
| 565 |
+
# Substitute overflowing decimal literal for AST infinities.
|
| 566 |
+
self.write(repr_n.replace("inf", INFSTR))
|
| 567 |
+
if repr_n.startswith("-"):
|
| 568 |
+
self.write(")")
|
| 569 |
+
|
| 570 |
+
def _List(self, t):
|
| 571 |
+
self.write("[")
|
| 572 |
+
interleave(lambda: self.write(", "), self.dispatch, t.elts)
|
| 573 |
+
self.write("]")
|
| 574 |
+
|
| 575 |
+
def _ListComp(self, t):
|
| 576 |
+
self.write("[")
|
| 577 |
+
self.dispatch(t.elt)
|
| 578 |
+
for gen in t.generators:
|
| 579 |
+
self.dispatch(gen)
|
| 580 |
+
self.write("]")
|
| 581 |
+
|
| 582 |
+
def _GeneratorExp(self, t):
|
| 583 |
+
self.write("(")
|
| 584 |
+
self.dispatch(t.elt)
|
| 585 |
+
for gen in t.generators:
|
| 586 |
+
self.dispatch(gen)
|
| 587 |
+
self.write(")")
|
| 588 |
+
|
| 589 |
+
def _SetComp(self, t):
|
| 590 |
+
self.write("{")
|
| 591 |
+
self.dispatch(t.elt)
|
| 592 |
+
for gen in t.generators:
|
| 593 |
+
self.dispatch(gen)
|
| 594 |
+
self.write("}")
|
| 595 |
+
|
| 596 |
+
def _DictComp(self, t):
|
| 597 |
+
self.write("{")
|
| 598 |
+
self.dispatch(t.key)
|
| 599 |
+
self.write(": ")
|
| 600 |
+
self.dispatch(t.value)
|
| 601 |
+
for gen in t.generators:
|
| 602 |
+
self.dispatch(gen)
|
| 603 |
+
self.write("}")
|
| 604 |
+
|
| 605 |
+
def _comprehension(self, t):
|
| 606 |
+
if getattr(t, 'is_async', False):
|
| 607 |
+
self.write(" async for ")
|
| 608 |
+
else:
|
| 609 |
+
self.write(" for ")
|
| 610 |
+
self.dispatch(t.target)
|
| 611 |
+
self.write(" in ")
|
| 612 |
+
self.dispatch(t.iter)
|
| 613 |
+
for if_clause in t.ifs:
|
| 614 |
+
self.write(" if ")
|
| 615 |
+
self.dispatch(if_clause)
|
| 616 |
+
|
| 617 |
+
def _IfExp(self, t):
|
| 618 |
+
self.write("(")
|
| 619 |
+
self.dispatch(t.body)
|
| 620 |
+
self.write(" if ")
|
| 621 |
+
self.dispatch(t.test)
|
| 622 |
+
self.write(" else ")
|
| 623 |
+
self.dispatch(t.orelse)
|
| 624 |
+
self.write(")")
|
| 625 |
+
|
| 626 |
+
def _Set(self, t):
|
| 627 |
+
assert(t.elts) # should be at least one element
|
| 628 |
+
self.write("{")
|
| 629 |
+
interleave(lambda: self.write(", "), self.dispatch, t.elts)
|
| 630 |
+
self.write("}")
|
| 631 |
+
|
| 632 |
+
def _Dict(self, t):
|
| 633 |
+
self.write("{")
|
| 634 |
+
def write_key_value_pair(k, v):
|
| 635 |
+
self.dispatch(k)
|
| 636 |
+
self.write(": ")
|
| 637 |
+
self.dispatch(v)
|
| 638 |
+
|
| 639 |
+
def write_item(item):
|
| 640 |
+
k, v = item
|
| 641 |
+
if k is None:
|
| 642 |
+
# for dictionary unpacking operator in dicts {**{'y': 2}}
|
| 643 |
+
# see PEP 448 for details
|
| 644 |
+
self.write("**")
|
| 645 |
+
self.dispatch(v)
|
| 646 |
+
else:
|
| 647 |
+
write_key_value_pair(k, v)
|
| 648 |
+
interleave(lambda: self.write(", "), write_item, zip(t.keys, t.values))
|
| 649 |
+
self.write("}")
|
| 650 |
+
|
| 651 |
+
def _Tuple(self, t):
|
| 652 |
+
self.write("(")
|
| 653 |
+
if len(t.elts) == 1:
|
| 654 |
+
elt = t.elts[0]
|
| 655 |
+
self.dispatch(elt)
|
| 656 |
+
self.write(",")
|
| 657 |
+
else:
|
| 658 |
+
interleave(lambda: self.write(", "), self.dispatch, t.elts)
|
| 659 |
+
self.write(")")
|
| 660 |
+
|
| 661 |
+
unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
|
| 662 |
+
def _UnaryOp(self, t):
|
| 663 |
+
self.write("(")
|
| 664 |
+
self.write(self.unop[t.op.__class__.__name__])
|
| 665 |
+
self.write(" ")
|
| 666 |
+
if six.PY2 and isinstance(t.op, ast.USub) and isinstance(t.operand, ast.Num):
|
| 667 |
+
# If we're applying unary minus to a number, parenthesize the number.
|
| 668 |
+
# This is necessary: -2147483648 is different from -(2147483648) on
|
| 669 |
+
# a 32-bit machine (the first is an int, the second a long), and
|
| 670 |
+
# -7j is different from -(7j). (The first has real part 0.0, the second
|
| 671 |
+
# has real part -0.0.)
|
| 672 |
+
self.write("(")
|
| 673 |
+
self.dispatch(t.operand)
|
| 674 |
+
self.write(")")
|
| 675 |
+
else:
|
| 676 |
+
self.dispatch(t.operand)
|
| 677 |
+
self.write(")")
|
| 678 |
+
|
| 679 |
+
binop = { "Add":"+", "Sub":"-", "Mult":"*", "MatMult":"@", "Div":"/", "Mod":"%",
|
| 680 |
+
"LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
|
| 681 |
+
"FloorDiv":"//", "Pow": "**"}
|
| 682 |
+
def _BinOp(self, t):
|
| 683 |
+
self.write("(")
|
| 684 |
+
self.dispatch(t.left)
|
| 685 |
+
self.write(" " + self.binop[t.op.__class__.__name__] + " ")
|
| 686 |
+
self.dispatch(t.right)
|
| 687 |
+
self.write(")")
|
| 688 |
+
|
| 689 |
+
cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
|
| 690 |
+
"Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
|
| 691 |
+
def _Compare(self, t):
|
| 692 |
+
self.write("(")
|
| 693 |
+
self.dispatch(t.left)
|
| 694 |
+
for o, e in zip(t.ops, t.comparators):
|
| 695 |
+
self.write(" " + self.cmpops[o.__class__.__name__] + " ")
|
| 696 |
+
self.dispatch(e)
|
| 697 |
+
self.write(")")
|
| 698 |
+
|
| 699 |
+
boolops = {ast.And: 'and', ast.Or: 'or'}
|
| 700 |
+
def _BoolOp(self, t):
|
| 701 |
+
self.write("(")
|
| 702 |
+
s = " %s " % self.boolops[t.op.__class__]
|
| 703 |
+
interleave(lambda: self.write(s), self.dispatch, t.values)
|
| 704 |
+
self.write(")")
|
| 705 |
+
|
| 706 |
+
def _Attribute(self,t):
|
| 707 |
+
self.dispatch(t.value)
|
| 708 |
+
# Special case: 3.__abs__() is a syntax error, so if t.value
|
| 709 |
+
# is an integer literal then we need to either parenthesize
|
| 710 |
+
# it or add an extra space to get 3 .__abs__().
|
| 711 |
+
if isinstance(t.value, getattr(ast, 'Constant', getattr(ast, 'Num', None))) and isinstance(t.value.n, int):
|
| 712 |
+
self.write(" ")
|
| 713 |
+
self.write(".")
|
| 714 |
+
self.write(t.attr)
|
| 715 |
+
|
| 716 |
+
def _Call(self, t):
|
| 717 |
+
self.dispatch(t.func)
|
| 718 |
+
self.write("(")
|
| 719 |
+
comma = False
|
| 720 |
+
for e in t.args:
|
| 721 |
+
if comma: self.write(", ")
|
| 722 |
+
else: comma = True
|
| 723 |
+
self.dispatch(e)
|
| 724 |
+
for e in t.keywords:
|
| 725 |
+
if comma: self.write(", ")
|
| 726 |
+
else: comma = True
|
| 727 |
+
self.dispatch(e)
|
| 728 |
+
if sys.version_info[:2] < (3, 5):
|
| 729 |
+
if t.starargs:
|
| 730 |
+
if comma: self.write(", ")
|
| 731 |
+
else: comma = True
|
| 732 |
+
self.write("*")
|
| 733 |
+
self.dispatch(t.starargs)
|
| 734 |
+
if t.kwargs:
|
| 735 |
+
if comma: self.write(", ")
|
| 736 |
+
else: comma = True
|
| 737 |
+
self.write("**")
|
| 738 |
+
self.dispatch(t.kwargs)
|
| 739 |
+
self.write(")")
|
| 740 |
+
|
| 741 |
+
def _Subscript(self, t):
|
| 742 |
+
self.dispatch(t.value)
|
| 743 |
+
self.write("[")
|
| 744 |
+
self.dispatch(t.slice)
|
| 745 |
+
self.write("]")
|
| 746 |
+
|
| 747 |
+
def _Starred(self, t):
|
| 748 |
+
self.write("*")
|
| 749 |
+
self.dispatch(t.value)
|
| 750 |
+
|
| 751 |
+
# slice
|
| 752 |
+
def _Ellipsis(self, t):
|
| 753 |
+
self.write("...")
|
| 754 |
+
|
| 755 |
+
def _Index(self, t):
|
| 756 |
+
self.dispatch(t.value)
|
| 757 |
+
|
| 758 |
+
def _Slice(self, t):
|
| 759 |
+
if t.lower:
|
| 760 |
+
self.dispatch(t.lower)
|
| 761 |
+
self.write(":")
|
| 762 |
+
if t.upper:
|
| 763 |
+
self.dispatch(t.upper)
|
| 764 |
+
if t.step:
|
| 765 |
+
self.write(":")
|
| 766 |
+
self.dispatch(t.step)
|
| 767 |
+
|
| 768 |
+
def _ExtSlice(self, t):
|
| 769 |
+
interleave(lambda: self.write(', '), self.dispatch, t.dims)
|
| 770 |
+
|
| 771 |
+
# argument
|
| 772 |
+
def _arg(self, t):
|
| 773 |
+
self.write(t.arg)
|
| 774 |
+
if t.annotation:
|
| 775 |
+
self.write(": ")
|
| 776 |
+
self.dispatch(t.annotation)
|
| 777 |
+
|
| 778 |
+
# others
|
| 779 |
+
def _arguments(self, t):
|
| 780 |
+
first = True
|
| 781 |
+
# normal arguments
|
| 782 |
+
all_args = getattr(t, 'posonlyargs', []) + t.args
|
| 783 |
+
defaults = [None] * (len(all_args) - len(t.defaults)) + t.defaults
|
| 784 |
+
for index, elements in enumerate(zip(all_args, defaults), 1):
|
| 785 |
+
a, d = elements
|
| 786 |
+
if first:first = False
|
| 787 |
+
else: self.write(", ")
|
| 788 |
+
self.dispatch(a)
|
| 789 |
+
if d:
|
| 790 |
+
self.write("=")
|
| 791 |
+
self.dispatch(d)
|
| 792 |
+
if index == len(getattr(t, 'posonlyargs', ())):
|
| 793 |
+
self.write(", /")
|
| 794 |
+
|
| 795 |
+
# varargs, or bare '*' if no varargs but keyword-only arguments present
|
| 796 |
+
if t.vararg or getattr(t, "kwonlyargs", False):
|
| 797 |
+
if first:first = False
|
| 798 |
+
else: self.write(", ")
|
| 799 |
+
self.write("*")
|
| 800 |
+
if t.vararg:
|
| 801 |
+
if hasattr(t.vararg, 'arg'):
|
| 802 |
+
self.write(t.vararg.arg)
|
| 803 |
+
if t.vararg.annotation:
|
| 804 |
+
self.write(": ")
|
| 805 |
+
self.dispatch(t.vararg.annotation)
|
| 806 |
+
else:
|
| 807 |
+
self.write(t.vararg)
|
| 808 |
+
if getattr(t, 'varargannotation', None):
|
| 809 |
+
self.write(": ")
|
| 810 |
+
self.dispatch(t.varargannotation)
|
| 811 |
+
|
| 812 |
+
# keyword-only arguments
|
| 813 |
+
if getattr(t, "kwonlyargs", False):
|
| 814 |
+
for a, d in zip(t.kwonlyargs, t.kw_defaults):
|
| 815 |
+
if first:first = False
|
| 816 |
+
else: self.write(", ")
|
| 817 |
+
self.dispatch(a),
|
| 818 |
+
if d:
|
| 819 |
+
self.write("=")
|
| 820 |
+
self.dispatch(d)
|
| 821 |
+
|
| 822 |
+
# kwargs
|
| 823 |
+
if t.kwarg:
|
| 824 |
+
if first:first = False
|
| 825 |
+
else: self.write(", ")
|
| 826 |
+
if hasattr(t.kwarg, 'arg'):
|
| 827 |
+
self.write("**"+t.kwarg.arg)
|
| 828 |
+
if t.kwarg.annotation:
|
| 829 |
+
self.write(": ")
|
| 830 |
+
self.dispatch(t.kwarg.annotation)
|
| 831 |
+
else:
|
| 832 |
+
self.write("**"+t.kwarg)
|
| 833 |
+
if getattr(t, 'kwargannotation', None):
|
| 834 |
+
self.write(": ")
|
| 835 |
+
self.dispatch(t.kwargannotation)
|
| 836 |
+
|
| 837 |
+
def _keyword(self, t):
|
| 838 |
+
if t.arg is None:
|
| 839 |
+
# starting from Python 3.5 this denotes a kwargs part of the invocation
|
| 840 |
+
self.write("**")
|
| 841 |
+
else:
|
| 842 |
+
self.write(t.arg)
|
| 843 |
+
self.write("=")
|
| 844 |
+
self.dispatch(t.value)
|
| 845 |
+
|
| 846 |
+
def _Lambda(self, t):
|
| 847 |
+
self.write("(")
|
| 848 |
+
self.write("lambda ")
|
| 849 |
+
self.dispatch(t.args)
|
| 850 |
+
self.write(": ")
|
| 851 |
+
self.dispatch(t.body)
|
| 852 |
+
self.write(")")
|
| 853 |
+
|
| 854 |
+
def _alias(self, t):
|
| 855 |
+
self.write(t.name)
|
| 856 |
+
if t.asname:
|
| 857 |
+
self.write(" as "+t.asname)
|
| 858 |
+
|
| 859 |
+
def _withitem(self, t):
|
| 860 |
+
self.dispatch(t.context_expr)
|
| 861 |
+
if t.optional_vars:
|
| 862 |
+
self.write(" as ")
|
| 863 |
+
self.dispatch(t.optional_vars)
|
| 864 |
+
|
| 865 |
+
def roundtrip(filename, output=sys.stdout):
|
| 866 |
+
if six.PY3:
|
| 867 |
+
with open(filename, "rb") as pyfile:
|
| 868 |
+
encoding = tokenize.detect_encoding(pyfile.readline)[0]
|
| 869 |
+
with open(filename, "r", encoding=encoding) as pyfile:
|
| 870 |
+
source = pyfile.read()
|
| 871 |
+
else:
|
| 872 |
+
with open(filename, "r") as pyfile:
|
| 873 |
+
source = pyfile.read()
|
| 874 |
+
tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST, dont_inherit=True)
|
| 875 |
+
Unparser(tree, output)
|
| 876 |
+
|
| 877 |
+
|
| 878 |
+
|
| 879 |
+
def testdir(a):
|
| 880 |
+
try:
|
| 881 |
+
names = [n for n in os.listdir(a) if n.endswith('.py')]
|
| 882 |
+
except OSError:
|
| 883 |
+
print("Directory not readable: %s" % a, file=sys.stderr)
|
| 884 |
+
else:
|
| 885 |
+
for n in names:
|
| 886 |
+
fullname = os.path.join(a, n)
|
| 887 |
+
if os.path.isfile(fullname):
|
| 888 |
+
output = StringIO()
|
| 889 |
+
print('Testing %s' % fullname)
|
| 890 |
+
try:
|
| 891 |
+
roundtrip(fullname, output)
|
| 892 |
+
except Exception as e:
|
| 893 |
+
print(' Failed to compile, exception is %s' % repr(e))
|
| 894 |
+
elif os.path.isdir(fullname):
|
| 895 |
+
testdir(fullname)
|
| 896 |
+
|
| 897 |
+
def main(args):
|
| 898 |
+
if args[0] == '--testdir':
|
| 899 |
+
for a in args[1:]:
|
| 900 |
+
testdir(a)
|
| 901 |
+
else:
|
| 902 |
+
for a in args:
|
| 903 |
+
roundtrip(a)
|
| 904 |
+
|
| 905 |
+
if __name__=='__main__':
|
| 906 |
+
main(sys.argv[1:])
|
lib/python3.10/site-packages/audioread-3.0.1.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2011-2018 Adrian Sampson
|
| 2 |
+
|
| 3 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 4 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 5 |
+
in the Software without restriction, including without limitation the rights
|
| 6 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 7 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 8 |
+
furnished to do so, subject to the following conditions:
|
| 9 |
+
|
| 10 |
+
The above copyright notice and this permission notice shall be included in all
|
| 11 |
+
copies or substantial portions of the Software.
|
| 12 |
+
|
| 13 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 14 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 15 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
| 16 |
+
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
| 17 |
+
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
| 18 |
+
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
| 19 |
+
OR OTHER DEALINGS IN THE SOFTWARE.
|
lib/python3.10/site-packages/audioread-3.0.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: audioread
|
| 3 |
+
Version: 3.0.1
|
| 4 |
+
Summary: Multi-library, cross-platform audio decoding.
|
| 5 |
+
Author-email: Adrian Sampson <adrian@radbox.org>
|
| 6 |
+
Requires-Python: >=3.6
|
| 7 |
+
Description-Content-Type: text/x-rst
|
| 8 |
+
Classifier: Topic :: Multimedia :: Sound/Audio :: Conversion
|
| 9 |
+
Classifier: Intended Audience :: Developers
|
| 10 |
+
Classifier: Programming Language :: Python :: 3
|
| 11 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 12 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 13 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 14 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 15 |
+
Requires-Dist: tox ; extra == "test"
|
| 16 |
+
Project-URL: Home, https://github.com/beetbox/audioread
|
| 17 |
+
Provides-Extra: test
|
| 18 |
+
|
| 19 |
+
audioread
|
| 20 |
+
=========
|
| 21 |
+
|
| 22 |
+
Decode audio files using whichever backend is available. The library
|
| 23 |
+
currently supports:
|
| 24 |
+
|
| 25 |
+
- `Gstreamer`_ via `PyGObject`_.
|
| 26 |
+
- `Core Audio`_ on Mac OS X via `ctypes`_. (PyObjC not required.)
|
| 27 |
+
- `MAD`_ via the `pymad`_ bindings.
|
| 28 |
+
- `FFmpeg`_ or `Libav`_ via its command-line interface.
|
| 29 |
+
- The standard library `wave`_, `aifc`_, and `sunau`_ modules (for
|
| 30 |
+
uncompressed audio formats).
|
| 31 |
+
|
| 32 |
+
.. _Gstreamer: http://gstreamer.freedesktop.org/
|
| 33 |
+
.. _gst-python: http://gstreamer.freedesktop.org/modules/gst-python.html
|
| 34 |
+
.. _Core Audio: http://developer.apple.com/technologies/mac/audio-and-video.html
|
| 35 |
+
.. _ctypes: http://docs.python.org/library/ctypes.html
|
| 36 |
+
.. _MAD: http://www.underbit.com/products/mad/
|
| 37 |
+
.. _pymad: http://spacepants.org/src/pymad/
|
| 38 |
+
.. _FFmpeg: http://ffmpeg.org/
|
| 39 |
+
.. _Libav: https://www.libav.org/
|
| 40 |
+
.. _wave: http://docs.python.org/library/wave.html
|
| 41 |
+
.. _aifc: http://docs.python.org/library/aifc.html
|
| 42 |
+
.. _sunau: http://docs.python.org/library/sunau.html
|
| 43 |
+
.. _PyGObject: https://pygobject.readthedocs.io/
|
| 44 |
+
|
| 45 |
+
Use the library like so::
|
| 46 |
+
|
| 47 |
+
with audioread.audio_open(filename) as f:
|
| 48 |
+
print(f.channels, f.samplerate, f.duration)
|
| 49 |
+
for buf in f:
|
| 50 |
+
do_something(buf)
|
| 51 |
+
|
| 52 |
+
Buffers in the file can be accessed by iterating over the object returned from
|
| 53 |
+
``audio_open``. Each buffer is a bytes-like object (``buffer``, ``bytes``, or
|
| 54 |
+
``bytearray``) containing raw **16-bit little-endian signed integer PCM
|
| 55 |
+
data**. (Currently, these PCM format parameters are not configurable, but this
|
| 56 |
+
could be added to most of the backends.)
|
| 57 |
+
|
| 58 |
+
Additional values are available as fields on the audio file object:
|
| 59 |
+
|
| 60 |
+
- ``channels`` is the number of audio channels (an integer).
|
| 61 |
+
- ``samplerate`` is given in Hz (an integer).
|
| 62 |
+
- ``duration`` is the length of the audio in seconds (a float).
|
| 63 |
+
|
| 64 |
+
The ``audio_open`` function transparently selects a backend that can read the
|
| 65 |
+
file. (Each backend is implemented in a module inside the ``audioread``
|
| 66 |
+
package.) If no backends succeed in opening the file, a ``DecodeError``
|
| 67 |
+
exception is raised. This exception is only used when the file type is
|
| 68 |
+
unsupported by the backends; if the file doesn't exist, a standard ``IOError``
|
| 69 |
+
will be raised.
|
| 70 |
+
|
| 71 |
+
A second optional parameter to ``audio_open`` specifies which backends to try
|
| 72 |
+
(instead of trying them all, which is the default). You can use the
|
| 73 |
+
``available_backends`` function to get a list backends that are usable on the
|
| 74 |
+
current system.
|
| 75 |
+
|
| 76 |
+
Audioread supports Python 3 (3.8+).
|
| 77 |
+
|
| 78 |
+
Example
|
| 79 |
+
-------
|
| 80 |
+
|
| 81 |
+
The included ``decode.py`` script demonstrates using this package to
|
| 82 |
+
convert compressed audio files to WAV files.
|
| 83 |
+
|
| 84 |
+
Troubleshooting
|
| 85 |
+
---------------
|
| 86 |
+
|
| 87 |
+
A ``NoBackendError`` exception means that the library could not find one of
|
| 88 |
+
the libraries or tools it needs to decode audio. This could mean, for example,
|
| 89 |
+
that you have a broken installation of `FFmpeg`_. To check, try typing
|
| 90 |
+
``ffmpeg -version`` in your shell. If that gives you an error, try installing
|
| 91 |
+
FFmpeg with your OS's package manager (e.g., apt or yum) or `using Conda
|
| 92 |
+
<https://anaconda.org/conda-forge/ffmpeg>`_.
|
| 93 |
+
|
| 94 |
+
Version History
|
| 95 |
+
---------------
|
| 96 |
+
|
| 97 |
+
3.0.1
|
| 98 |
+
Fix a possible deadlock when FFmpeg's version output produces too much data.
|
| 99 |
+
|
| 100 |
+
3.0.0
|
| 101 |
+
Drop support for Python 2 and older versions of Python 3. The library now
|
| 102 |
+
requires Python 3.6+.
|
| 103 |
+
Increase default block size in FFmpegAudioFile to get slightly faster file reading.
|
| 104 |
+
Cache backends for faster lookup (thanks to @bmcfee).
|
| 105 |
+
Audio file classes now inherit from a common base ``AudioFile`` class.
|
| 106 |
+
|
| 107 |
+
2.1.9
|
| 108 |
+
Work correctly with GStreamer 1.18 and later (thanks to @ssssam).
|
| 109 |
+
|
| 110 |
+
2.1.8
|
| 111 |
+
Fix an unhandled ``OSError`` when FFmpeg is not installed.
|
| 112 |
+
|
| 113 |
+
2.1.7
|
| 114 |
+
Properly close some filehandles in the FFmpeg backend (thanks to
|
| 115 |
+
@RyanMarcus and @ssssam).
|
| 116 |
+
The maddec backend now always produces bytes objects, like the other
|
| 117 |
+
backends (thanks to @ssssam).
|
| 118 |
+
Resolve an audio data memory leak in the GStreamer backend (thanks again to
|
| 119 |
+
@ssssam).
|
| 120 |
+
You can now optionally specify which specific backends ``audio_open`` should
|
| 121 |
+
try (thanks once again to @ssssam).
|
| 122 |
+
On Windows, avoid opening a console window to run FFmpeg (thanks to @flokX).
|
| 123 |
+
|
| 124 |
+
2.1.6
|
| 125 |
+
Fix a "no such process" crash in the FFmpeg backend on Windows Subsystem for
|
| 126 |
+
Linux (thanks to @llamasoft).
|
| 127 |
+
Avoid suppressing SIGINT in the GStreamer backend on older versions of
|
| 128 |
+
PyGObject (thanks to @lazka).
|
| 129 |
+
|
| 130 |
+
2.1.5
|
| 131 |
+
Properly clean up the file handle when a backend fails to decode a file.
|
| 132 |
+
Fix parsing of "N.M" channel counts in the FFmpeg backend (thanks to @piem).
|
| 133 |
+
Avoid a crash in the raw backend when a file uses an unsupported number of
|
| 134 |
+
bits per sample (namely, 24-bit samples in Python < 3.4).
|
| 135 |
+
Add a ``__version__`` value to the package.
|
| 136 |
+
|
| 137 |
+
2.1.4
|
| 138 |
+
Fix a bug in the FFmpeg backend where, after closing a file, the program's
|
| 139 |
+
standard input stream would be "broken" and wouldn't receive any input.
|
| 140 |
+
|
| 141 |
+
2.1.3
|
| 142 |
+
Avoid some warnings in the GStreamer backend when using modern versions of
|
| 143 |
+
GLib. We now require at least GLib 2.32.
|
| 144 |
+
|
| 145 |
+
2.1.2
|
| 146 |
+
Fix a file descriptor leak when opening and closing many files using
|
| 147 |
+
GStreamer.
|
| 148 |
+
|
| 149 |
+
2.1.1
|
| 150 |
+
Just fix ReST formatting in the README.
|
| 151 |
+
|
| 152 |
+
2.1.0
|
| 153 |
+
The FFmpeg backend can now also use Libav's ``avconv`` command.
|
| 154 |
+
Fix a warning by requiring GStreamer >= 1.0.
|
| 155 |
+
Fix some Python 3 crashes with the new GStreamer backend (thanks to
|
| 156 |
+
@xix-xeaon).
|
| 157 |
+
|
| 158 |
+
2.0.0
|
| 159 |
+
The GStreamer backend now uses GStreamer 1.x via the new
|
| 160 |
+
gobject-introspection API (and is compatible with Python 3).
|
| 161 |
+
|
| 162 |
+
1.2.2
|
| 163 |
+
When running FFmpeg on Windows, disable its crash dialog. Thanks to
|
| 164 |
+
jcsaaddupuy.
|
| 165 |
+
|
| 166 |
+
1.2.1
|
| 167 |
+
Fix an unhandled exception when opening non-raw audio files (thanks to
|
| 168 |
+
aostanin).
|
| 169 |
+
Fix Python 3 compatibility for the raw-file backend.
|
| 170 |
+
|
| 171 |
+
1.2.0
|
| 172 |
+
Add support for FFmpeg on Windows (thanks to Jean-Christophe Saad-Dupuy).
|
| 173 |
+
|
| 174 |
+
1.1.0
|
| 175 |
+
Add support for Sun/NeXT `Au files`_ via the standard-library ``sunau``
|
| 176 |
+
module (thanks to Dan Ellis).
|
| 177 |
+
|
| 178 |
+
1.0.3
|
| 179 |
+
Use the rawread (standard-library) backend for .wav files.
|
| 180 |
+
|
| 181 |
+
1.0.2
|
| 182 |
+
Send SIGKILL, not SIGTERM, to ffmpeg processes to avoid occasional hangs.
|
| 183 |
+
|
| 184 |
+
1.0.1
|
| 185 |
+
When GStreamer fails to report a duration, raise an exception instead of
|
| 186 |
+
silently setting the duration field to None.
|
| 187 |
+
|
| 188 |
+
1.0.0
|
| 189 |
+
Catch GStreamer's exception when necessary components, such as
|
| 190 |
+
``uridecodebin``, are missing.
|
| 191 |
+
The GStreamer backend now accepts relative paths.
|
| 192 |
+
Fix a hang in GStreamer when the stream finishes before it begins (when
|
| 193 |
+
reading broken files).
|
| 194 |
+
Initial support for Python 3.
|
| 195 |
+
|
| 196 |
+
0.8
|
| 197 |
+
All decoding errors are now subclasses of ``DecodeError``.
|
| 198 |
+
|
| 199 |
+
0.7
|
| 200 |
+
Fix opening WAV and AIFF files via Unicode filenames.
|
| 201 |
+
|
| 202 |
+
0.6
|
| 203 |
+
Make FFmpeg timeout more robust.
|
| 204 |
+
Dump FFmpeg output on timeout.
|
| 205 |
+
Fix a nondeterministic hang in the Gstreamer backend.
|
| 206 |
+
Fix a file descriptor leak in the MAD backend.
|
| 207 |
+
|
| 208 |
+
0.5
|
| 209 |
+
Fix crash when FFmpeg fails to report a duration.
|
| 210 |
+
Fix a hang when FFmpeg fills up its stderr output buffer.
|
| 211 |
+
Add a timeout to ``ffmpeg`` tool execution (currently 10 seconds for each
|
| 212 |
+
4096-byte read); a ``ReadTimeoutError`` exception is raised if the tool times
|
| 213 |
+
out.
|
| 214 |
+
|
| 215 |
+
0.4
|
| 216 |
+
Fix channel count detection for FFmpeg backend.
|
| 217 |
+
|
| 218 |
+
0.3
|
| 219 |
+
Fix a problem with the Gstreamer backend where audio files could be left open
|
| 220 |
+
even after the ``GstAudioFile`` was "closed".
|
| 221 |
+
|
| 222 |
+
0.2
|
| 223 |
+
Fix a hang in the GStreamer backend that occurs occasionally on some
|
| 224 |
+
platforms.
|
| 225 |
+
|
| 226 |
+
0.1
|
| 227 |
+
Initial release.
|
| 228 |
+
|
| 229 |
+
.. _Au files: http://en.wikipedia.org/wiki/Au_file_format
|
| 230 |
+
|
| 231 |
+
Et Cetera
|
| 232 |
+
---------
|
| 233 |
+
|
| 234 |
+
``audioread`` is by Adrian Sampson. It is made available under `the MIT
|
| 235 |
+
license`_. An alternative to this module is `decoder.py`_.
|
| 236 |
+
|
| 237 |
+
.. _the MIT license: http://www.opensource.org/licenses/mit-license.php
|
| 238 |
+
.. _decoder.py: http://www.brailleweb.com/cgi-bin/python.py
|
| 239 |
+
|
lib/python3.10/site-packages/audioread-3.0.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
audioread-3.0.1.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
|
| 2 |
+
audioread-3.0.1.dist-info/LICENSE,sha256=4A__aKdaWCEyhC4zQmcwaZJVmG8d7DYiUvdCPbAnAZ0,1063
|
| 3 |
+
audioread-3.0.1.dist-info/METADATA,sha256=PFp7DIIloSaEd08t9jljil9LFORgoybrSH0iFDyRJns,8367
|
| 4 |
+
audioread-3.0.1.dist-info/RECORD,,
|
| 5 |
+
audioread-3.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
audioread-3.0.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
| 7 |
+
audioread/__init__.py,sha256=33Jptohj1m0QO5216orqi9-QMkXftvUyS4nxo--6Uj8,3592
|
| 8 |
+
audioread/base.py,sha256=AO1WKrUUQtrh3hCuvHJaAu_HWQnIVXLvkQyOCWFtWhU,725
|
| 9 |
+
audioread/exceptions.py,sha256=RTwYBpMlBy4bWPeSxidoz69YCXjTtpHrHFMuHWiJ6h0,962
|
| 10 |
+
audioread/ffdec.py,sha256=A8kcImseS99YywzNsuK8DsORho-6vyI79XeJ92CN-YQ,10541
|
| 11 |
+
audioread/gstdec.py,sha256=ksh08sEgN-bLVSoITod0QkeQhXDh7s1_3BMUwTGCu2s,14643
|
| 12 |
+
audioread/macca.py,sha256=HOFuu-SlWiCZetSZxGa-u2XndLy3zMCsfOOqAprFfHM,10899
|
| 13 |
+
audioread/maddec.py,sha256=9MbadGkBIYXVgzZq6cgYCV2FAZwVk8AWTYrsWyee98g,2518
|
| 14 |
+
audioread/rawread.py,sha256=eLA23jT41c1e0nyDmnXJrKwgFo4mNBrILhVzDPr4au8,4322
|
| 15 |
+
audioread/version.py,sha256=3nFz3RTyd0r19pXSvghZzf4rus1066YSjTwpEvbrEqo,738
|
lib/python3.10/site-packages/audioread-3.0.1.dist-info/REQUESTED
ADDED
|
File without changes
|
lib/python3.10/site-packages/audioread-3.0.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.9.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
uv
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
Apache License
|
| 3 |
+
Version 2.0, January 2004
|
| 4 |
+
http://www.apache.org/licenses/
|
| 5 |
+
|
| 6 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 7 |
+
|
| 8 |
+
1. Definitions.
|
| 9 |
+
|
| 10 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 11 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 12 |
+
|
| 13 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 14 |
+
the copyright owner that is granting the License.
|
| 15 |
+
|
| 16 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 17 |
+
other entities that control, are controlled by, or are under common
|
| 18 |
+
control with that entity. For the purposes of this definition,
|
| 19 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 20 |
+
direction or management of such entity, whether by contract or
|
| 21 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 22 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 23 |
+
|
| 24 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 25 |
+
exercising permissions granted by this License.
|
| 26 |
+
|
| 27 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 28 |
+
including but not limited to software source code, documentation
|
| 29 |
+
source, and configuration files.
|
| 30 |
+
|
| 31 |
+
"Object" form shall mean any form resulting from mechanical
|
| 32 |
+
transformation or translation of a Source form, including but
|
| 33 |
+
not limited to compiled object code, generated documentation,
|
| 34 |
+
and conversions to other media types.
|
| 35 |
+
|
| 36 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 37 |
+
Object form, made available under the License, as indicated by a
|
| 38 |
+
copyright notice that is included in or attached to the work
|
| 39 |
+
(an example is provided in the Appendix below).
|
| 40 |
+
|
| 41 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 42 |
+
form, that is based on (or derived from) the Work and for which the
|
| 43 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 44 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 45 |
+
of this License, Derivative Works shall not include works that remain
|
| 46 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 47 |
+
the Work and Derivative Works thereof.
|
| 48 |
+
|
| 49 |
+
"Contribution" shall mean any work of authorship, including
|
| 50 |
+
the original version of the Work and any modifications or additions
|
| 51 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 52 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 53 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 54 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 55 |
+
means any form of electronic, verbal, or written communication sent
|
| 56 |
+
to the Licensor or its representatives, including but not limited to
|
| 57 |
+
communication on electronic mailing lists, source code control systems,
|
| 58 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 59 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 60 |
+
excluding communication that is conspicuously marked or otherwise
|
| 61 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 62 |
+
|
| 63 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 64 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 65 |
+
subsequently incorporated within the Work.
|
| 66 |
+
|
| 67 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 68 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 69 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 70 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 71 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 72 |
+
Work and such Derivative Works in Source or Object form.
|
| 73 |
+
|
| 74 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 75 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 76 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 77 |
+
(except as stated in this section) patent license to make, have made,
|
| 78 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 79 |
+
where such license applies only to those patent claims licensable
|
| 80 |
+
by such Contributor that are necessarily infringed by their
|
| 81 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 82 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 83 |
+
institute patent litigation against any entity (including a
|
| 84 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 85 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 86 |
+
or contributory patent infringement, then any patent licenses
|
| 87 |
+
granted to You under this License for that Work shall terminate
|
| 88 |
+
as of the date such litigation is filed.
|
| 89 |
+
|
| 90 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 91 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 92 |
+
modifications, and in Source or Object form, provided that You
|
| 93 |
+
meet the following conditions:
|
| 94 |
+
|
| 95 |
+
(a) You must give any other recipients of the Work or
|
| 96 |
+
Derivative Works a copy of this License; and
|
| 97 |
+
|
| 98 |
+
(b) You must cause any modified files to carry prominent notices
|
| 99 |
+
stating that You changed the files; and
|
| 100 |
+
|
| 101 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 102 |
+
that You distribute, all copyright, patent, trademark, and
|
| 103 |
+
attribution notices from the Source form of the Work,
|
| 104 |
+
excluding those notices that do not pertain to any part of
|
| 105 |
+
the Derivative Works; and
|
| 106 |
+
|
| 107 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 108 |
+
distribution, then any Derivative Works that You distribute must
|
| 109 |
+
include a readable copy of the attribution notices contained
|
| 110 |
+
within such NOTICE file, excluding those notices that do not
|
| 111 |
+
pertain to any part of the Derivative Works, in at least one
|
| 112 |
+
of the following places: within a NOTICE text file distributed
|
| 113 |
+
as part of the Derivative Works; within the Source form or
|
| 114 |
+
documentation, if provided along with the Derivative Works; or,
|
| 115 |
+
within a display generated by the Derivative Works, if and
|
| 116 |
+
wherever such third-party notices normally appear. The contents
|
| 117 |
+
of the NOTICE file are for informational purposes only and
|
| 118 |
+
do not modify the License. You may add Your own attribution
|
| 119 |
+
notices within Derivative Works that You distribute, alongside
|
| 120 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 121 |
+
that such additional attribution notices cannot be construed
|
| 122 |
+
as modifying the License.
|
| 123 |
+
|
| 124 |
+
You may add Your own copyright statement to Your modifications and
|
| 125 |
+
may provide additional or different license terms and conditions
|
| 126 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 127 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 128 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 129 |
+
the conditions stated in this License.
|
| 130 |
+
|
| 131 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 132 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 133 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 134 |
+
this License, without any additional terms or conditions.
|
| 135 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 136 |
+
the terms of any separate license agreement you may have executed
|
| 137 |
+
with Licensor regarding such Contributions.
|
| 138 |
+
|
| 139 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 140 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 141 |
+
except as required for reasonable and customary use in describing the
|
| 142 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 143 |
+
|
| 144 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 145 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 146 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 147 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 148 |
+
implied, including, without limitation, any warranties or conditions
|
| 149 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 150 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 151 |
+
appropriateness of using or redistributing the Work and assume any
|
| 152 |
+
risks associated with Your exercise of permissions under this License.
|
| 153 |
+
|
| 154 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 155 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 156 |
+
unless required by applicable law (such as deliberate and grossly
|
| 157 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 158 |
+
liable to You for damages, including any direct, indirect, special,
|
| 159 |
+
incidental, or consequential damages of any character arising as a
|
| 160 |
+
result of this License or out of the use or inability to use the
|
| 161 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 162 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 163 |
+
other commercial damages or losses), even if such Contributor
|
| 164 |
+
has been advised of the possibility of such damages.
|
| 165 |
+
|
| 166 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 167 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 168 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 169 |
+
or other liability obligations and/or rights consistent with this
|
| 170 |
+
License. However, in accepting such obligations, You may act only
|
| 171 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 172 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 173 |
+
defend, and hold each Contributor harmless for any liability
|
| 174 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 175 |
+
of your accepting any such warranty or additional liability.
|
| 176 |
+
|
| 177 |
+
END OF TERMS AND CONDITIONS
|
| 178 |
+
|
| 179 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 180 |
+
|
| 181 |
+
To apply the Apache License to your work, attach the following
|
| 182 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 183 |
+
replaced with your own identifying information. (Don't include
|
| 184 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 185 |
+
comment syntax for the file format. We also recommend that a
|
| 186 |
+
file or class name and description of purpose be included on the
|
| 187 |
+
same "printed page" as the copyright notice for easier
|
| 188 |
+
identification within third-party archives.
|
| 189 |
+
|
| 190 |
+
Copyright [yyyy] [name of copyright owner]
|
| 191 |
+
|
| 192 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 193 |
+
you may not use this file except in compliance with the License.
|
| 194 |
+
You may obtain a copy of the License at
|
| 195 |
+
|
| 196 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 197 |
+
|
| 198 |
+
Unless required by applicable law or agreed to in writing, software
|
| 199 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 200 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 201 |
+
See the License for the specific language governing permissions and
|
| 202 |
+
limitations under the License.
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/METADATA
ADDED
|
@@ -0,0 +1,365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: datasets
|
| 3 |
+
Version: 2.14.4
|
| 4 |
+
Summary: HuggingFace community-driven open-source library of datasets
|
| 5 |
+
Home-page: https://github.com/huggingface/datasets
|
| 6 |
+
Author: HuggingFace Inc.
|
| 7 |
+
Author-email: thomas@huggingface.co
|
| 8 |
+
License: Apache 2.0
|
| 9 |
+
Download-URL: https://github.com/huggingface/datasets/tags
|
| 10 |
+
Keywords: datasets machine learning datasets metrics
|
| 11 |
+
Platform: UNKNOWN
|
| 12 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 13 |
+
Classifier: Intended Audience :: Developers
|
| 14 |
+
Classifier: Intended Audience :: Education
|
| 15 |
+
Classifier: Intended Audience :: Science/Research
|
| 16 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 17 |
+
Classifier: Operating System :: OS Independent
|
| 18 |
+
Classifier: Programming Language :: Python :: 3
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 22 |
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
| 23 |
+
Requires-Python: >=3.8.0
|
| 24 |
+
Description-Content-Type: text/markdown
|
| 25 |
+
License-File: LICENSE
|
| 26 |
+
Requires-Dist: numpy (>=1.17)
|
| 27 |
+
Requires-Dist: pyarrow (>=8.0.0)
|
| 28 |
+
Requires-Dist: dill (<0.3.8,>=0.3.0)
|
| 29 |
+
Requires-Dist: pandas
|
| 30 |
+
Requires-Dist: requests (>=2.19.0)
|
| 31 |
+
Requires-Dist: tqdm (>=4.62.1)
|
| 32 |
+
Requires-Dist: xxhash
|
| 33 |
+
Requires-Dist: multiprocess
|
| 34 |
+
Requires-Dist: fsspec[http] (>=2021.11.1)
|
| 35 |
+
Requires-Dist: aiohttp
|
| 36 |
+
Requires-Dist: huggingface-hub (<1.0.0,>=0.14.0)
|
| 37 |
+
Requires-Dist: packaging
|
| 38 |
+
Requires-Dist: pyyaml (>=5.1)
|
| 39 |
+
Provides-Extra: apache-beam
|
| 40 |
+
Requires-Dist: apache-beam (<2.44.0,>=2.26.0) ; extra == 'apache-beam'
|
| 41 |
+
Provides-Extra: audio
|
| 42 |
+
Requires-Dist: soundfile (>=0.12.1) ; extra == 'audio'
|
| 43 |
+
Requires-Dist: librosa ; extra == 'audio'
|
| 44 |
+
Provides-Extra: benchmarks
|
| 45 |
+
Requires-Dist: tensorflow (==2.12.0) ; extra == 'benchmarks'
|
| 46 |
+
Requires-Dist: torch (==2.0.1) ; extra == 'benchmarks'
|
| 47 |
+
Requires-Dist: transformers (==4.30.1) ; extra == 'benchmarks'
|
| 48 |
+
Provides-Extra: dev
|
| 49 |
+
Requires-Dist: absl-py ; extra == 'dev'
|
| 50 |
+
Requires-Dist: joblib (<1.3.0) ; extra == 'dev'
|
| 51 |
+
Requires-Dist: joblibspark ; extra == 'dev'
|
| 52 |
+
Requires-Dist: pytest ; extra == 'dev'
|
| 53 |
+
Requires-Dist: pytest-datadir ; extra == 'dev'
|
| 54 |
+
Requires-Dist: pytest-xdist ; extra == 'dev'
|
| 55 |
+
Requires-Dist: elasticsearch (<8.0.0) ; extra == 'dev'
|
| 56 |
+
Requires-Dist: faiss-cpu (>=1.6.4) ; extra == 'dev'
|
| 57 |
+
Requires-Dist: lz4 ; extra == 'dev'
|
| 58 |
+
Requires-Dist: pyspark (>=3.4) ; extra == 'dev'
|
| 59 |
+
Requires-Dist: py7zr ; extra == 'dev'
|
| 60 |
+
Requires-Dist: rarfile (>=4.0) ; extra == 'dev'
|
| 61 |
+
Requires-Dist: sqlalchemy (<2.0.0) ; extra == 'dev'
|
| 62 |
+
Requires-Dist: s3fs (>=2021.11.1) ; extra == 'dev'
|
| 63 |
+
Requires-Dist: tiktoken ; extra == 'dev'
|
| 64 |
+
Requires-Dist: torch ; extra == 'dev'
|
| 65 |
+
Requires-Dist: soundfile (>=0.12.1) ; extra == 'dev'
|
| 66 |
+
Requires-Dist: transformers ; extra == 'dev'
|
| 67 |
+
Requires-Dist: zstandard ; extra == 'dev'
|
| 68 |
+
Requires-Dist: Pillow (>=6.2.1) ; extra == 'dev'
|
| 69 |
+
Requires-Dist: librosa ; extra == 'dev'
|
| 70 |
+
Requires-Dist: black (~=23.1) ; extra == 'dev'
|
| 71 |
+
Requires-Dist: ruff (>=0.0.241) ; extra == 'dev'
|
| 72 |
+
Requires-Dist: pyyaml (>=5.3.1) ; extra == 'dev'
|
| 73 |
+
Requires-Dist: s3fs ; extra == 'dev'
|
| 74 |
+
Requires-Dist: apache-beam (<2.44.0,>=2.26.0) ; (python_version < "3.10") and extra == 'dev'
|
| 75 |
+
Requires-Dist: tensorflow (!=2.6.0,!=2.6.1,>=2.3) ; (sys_platform != "darwin" or platform_machine != "arm64") and extra == 'dev'
|
| 76 |
+
Requires-Dist: tensorflow (!=2.6.0,!=2.6.1,>=2.2.0) ; (sys_platform != "darwin" or platform_machine != "arm64") and extra == 'dev'
|
| 77 |
+
Requires-Dist: tensorflow-macos ; (sys_platform == "darwin" and platform_machine == "arm64") and extra == 'dev'
|
| 78 |
+
Provides-Extra: docs
|
| 79 |
+
Requires-Dist: s3fs ; extra == 'docs'
|
| 80 |
+
Requires-Dist: transformers ; extra == 'docs'
|
| 81 |
+
Requires-Dist: torch ; extra == 'docs'
|
| 82 |
+
Requires-Dist: tensorflow (!=2.6.0,!=2.6.1,>=2.2.0) ; (sys_platform != "darwin" or platform_machine != "arm64") and extra == 'docs'
|
| 83 |
+
Requires-Dist: tensorflow-macos ; (sys_platform == "darwin" and platform_machine == "arm64") and extra == 'docs'
|
| 84 |
+
Provides-Extra: jax
|
| 85 |
+
Requires-Dist: jax (!=0.3.2,<=0.3.25,>=0.2.8) ; extra == 'jax'
|
| 86 |
+
Requires-Dist: jaxlib (<=0.3.25,>=0.1.65) ; extra == 'jax'
|
| 87 |
+
Provides-Extra: metrics-tests
|
| 88 |
+
Requires-Dist: accelerate ; extra == 'metrics-tests'
|
| 89 |
+
Requires-Dist: bert-score (>=0.3.6) ; extra == 'metrics-tests'
|
| 90 |
+
Requires-Dist: jiwer ; extra == 'metrics-tests'
|
| 91 |
+
Requires-Dist: langdetect ; extra == 'metrics-tests'
|
| 92 |
+
Requires-Dist: mauve-text ; extra == 'metrics-tests'
|
| 93 |
+
Requires-Dist: nltk ; extra == 'metrics-tests'
|
| 94 |
+
Requires-Dist: rouge-score ; extra == 'metrics-tests'
|
| 95 |
+
Requires-Dist: sacrebleu ; extra == 'metrics-tests'
|
| 96 |
+
Requires-Dist: sacremoses ; extra == 'metrics-tests'
|
| 97 |
+
Requires-Dist: scikit-learn ; extra == 'metrics-tests'
|
| 98 |
+
Requires-Dist: scipy ; extra == 'metrics-tests'
|
| 99 |
+
Requires-Dist: sentencepiece ; extra == 'metrics-tests'
|
| 100 |
+
Requires-Dist: seqeval ; extra == 'metrics-tests'
|
| 101 |
+
Requires-Dist: spacy (>=3.0.0) ; extra == 'metrics-tests'
|
| 102 |
+
Requires-Dist: tldextract ; extra == 'metrics-tests'
|
| 103 |
+
Requires-Dist: toml (>=0.10.1) ; extra == 'metrics-tests'
|
| 104 |
+
Requires-Dist: typer (<0.5.0) ; extra == 'metrics-tests'
|
| 105 |
+
Requires-Dist: requests-file (>=1.5.1) ; extra == 'metrics-tests'
|
| 106 |
+
Requires-Dist: tldextract (>=3.1.0) ; extra == 'metrics-tests'
|
| 107 |
+
Requires-Dist: texttable (>=1.6.3) ; extra == 'metrics-tests'
|
| 108 |
+
Requires-Dist: Werkzeug (>=1.0.1) ; extra == 'metrics-tests'
|
| 109 |
+
Requires-Dist: six (~=1.15.0) ; extra == 'metrics-tests'
|
| 110 |
+
Provides-Extra: quality
|
| 111 |
+
Requires-Dist: black (~=23.1) ; extra == 'quality'
|
| 112 |
+
Requires-Dist: ruff (>=0.0.241) ; extra == 'quality'
|
| 113 |
+
Requires-Dist: pyyaml (>=5.3.1) ; extra == 'quality'
|
| 114 |
+
Provides-Extra: s3
|
| 115 |
+
Requires-Dist: s3fs ; extra == 's3'
|
| 116 |
+
Provides-Extra: streaming
|
| 117 |
+
Provides-Extra: tensorflow
|
| 118 |
+
Requires-Dist: tensorflow (!=2.6.0,!=2.6.1,>=2.2.0) ; (sys_platform != "darwin" or platform_machine != "arm64") and extra == 'tensorflow'
|
| 119 |
+
Requires-Dist: tensorflow-macos ; (sys_platform == "darwin" and platform_machine == "arm64") and extra == 'tensorflow'
|
| 120 |
+
Provides-Extra: tensorflow_gpu
|
| 121 |
+
Requires-Dist: tensorflow-gpu (!=2.6.0,!=2.6.1,>=2.2.0) ; extra == 'tensorflow_gpu'
|
| 122 |
+
Provides-Extra: tests
|
| 123 |
+
Requires-Dist: absl-py ; extra == 'tests'
|
| 124 |
+
Requires-Dist: joblib (<1.3.0) ; extra == 'tests'
|
| 125 |
+
Requires-Dist: joblibspark ; extra == 'tests'
|
| 126 |
+
Requires-Dist: pytest ; extra == 'tests'
|
| 127 |
+
Requires-Dist: pytest-datadir ; extra == 'tests'
|
| 128 |
+
Requires-Dist: pytest-xdist ; extra == 'tests'
|
| 129 |
+
Requires-Dist: elasticsearch (<8.0.0) ; extra == 'tests'
|
| 130 |
+
Requires-Dist: faiss-cpu (>=1.6.4) ; extra == 'tests'
|
| 131 |
+
Requires-Dist: lz4 ; extra == 'tests'
|
| 132 |
+
Requires-Dist: pyspark (>=3.4) ; extra == 'tests'
|
| 133 |
+
Requires-Dist: py7zr ; extra == 'tests'
|
| 134 |
+
Requires-Dist: rarfile (>=4.0) ; extra == 'tests'
|
| 135 |
+
Requires-Dist: sqlalchemy (<2.0.0) ; extra == 'tests'
|
| 136 |
+
Requires-Dist: s3fs (>=2021.11.1) ; extra == 'tests'
|
| 137 |
+
Requires-Dist: tiktoken ; extra == 'tests'
|
| 138 |
+
Requires-Dist: torch ; extra == 'tests'
|
| 139 |
+
Requires-Dist: soundfile (>=0.12.1) ; extra == 'tests'
|
| 140 |
+
Requires-Dist: transformers ; extra == 'tests'
|
| 141 |
+
Requires-Dist: zstandard ; extra == 'tests'
|
| 142 |
+
Requires-Dist: Pillow (>=6.2.1) ; extra == 'tests'
|
| 143 |
+
Requires-Dist: librosa ; extra == 'tests'
|
| 144 |
+
Requires-Dist: apache-beam (<2.44.0,>=2.26.0) ; (python_version < "3.10") and extra == 'tests'
|
| 145 |
+
Requires-Dist: tensorflow (!=2.6.0,!=2.6.1,>=2.3) ; (sys_platform != "darwin" or platform_machine != "arm64") and extra == 'tests'
|
| 146 |
+
Requires-Dist: tensorflow-macos ; (sys_platform == "darwin" and platform_machine == "arm64") and extra == 'tests'
|
| 147 |
+
Provides-Extra: torch
|
| 148 |
+
Requires-Dist: torch ; extra == 'torch'
|
| 149 |
+
Provides-Extra: vision
|
| 150 |
+
Requires-Dist: Pillow (>=6.2.1) ; extra == 'vision'
|
| 151 |
+
|
| 152 |
+
<p align="center">
|
| 153 |
+
<picture>
|
| 154 |
+
<source media="(prefers-color-scheme: dark)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-dark.svg">
|
| 155 |
+
<source media="(prefers-color-scheme: light)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-light.svg">
|
| 156 |
+
<img alt="Hugging Face Datasets Library" src="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-light.svg" width="352" height="59" style="max-width: 100%;">
|
| 157 |
+
</picture>
|
| 158 |
+
<br/>
|
| 159 |
+
<br/>
|
| 160 |
+
</p>
|
| 161 |
+
|
| 162 |
+
<p align="center">
|
| 163 |
+
<a href="https://github.com/huggingface/datasets/actions/workflows/ci.yml?query=branch%3Amain">
|
| 164 |
+
<img alt="Build" src="https://github.com/huggingface/datasets/actions/workflows/ci.yml/badge.svg?branch=main">
|
| 165 |
+
</a>
|
| 166 |
+
<a href="https://github.com/huggingface/datasets/blob/main/LICENSE">
|
| 167 |
+
<img alt="GitHub" src="https://img.shields.io/github/license/huggingface/datasets.svg?color=blue">
|
| 168 |
+
</a>
|
| 169 |
+
<a href="https://huggingface.co/docs/datasets/index.html">
|
| 170 |
+
<img alt="Documentation" src="https://img.shields.io/website/http/huggingface.co/docs/datasets/index.html.svg?down_color=red&down_message=offline&up_message=online">
|
| 171 |
+
</a>
|
| 172 |
+
<a href="https://github.com/huggingface/datasets/releases">
|
| 173 |
+
<img alt="GitHub release" src="https://img.shields.io/github/release/huggingface/datasets.svg">
|
| 174 |
+
</a>
|
| 175 |
+
<a href="https://huggingface.co/datasets/">
|
| 176 |
+
<img alt="Number of datasets" src="https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/datasets&color=brightgreen">
|
| 177 |
+
</a>
|
| 178 |
+
<a href="CODE_OF_CONDUCT.md">
|
| 179 |
+
<img alt="Contributor Covenant" src="https://img.shields.io/badge/Contributor%20Covenant-2.0-4baaaa.svg">
|
| 180 |
+
</a>
|
| 181 |
+
<a href="https://zenodo.org/badge/latestdoi/250213286"><img src="https://zenodo.org/badge/250213286.svg" alt="DOI"></a>
|
| 182 |
+
</p>
|
| 183 |
+
|
| 184 |
+
🤗 Datasets is a lightweight library providing **two** main features:
|
| 185 |
+
|
| 186 |
+
- **one-line dataloaders for many public datasets**: one-liners to download and pre-process any of the  major public datasets (image datasets, audio datasets, text datasets in 467 languages and dialects, etc.) provided on the [HuggingFace Datasets Hub](https://huggingface.co/datasets). With a simple command like `squad_dataset = load_dataset("squad")`, get any of these datasets ready to use in a dataloader for training/evaluating a ML model (Numpy/Pandas/PyTorch/TensorFlow/JAX),
|
| 187 |
+
- **efficient data pre-processing**: simple, fast and reproducible data pre-processing for the public datasets as well as your own local datasets in CSV, JSON, text, PNG, JPEG, WAV, MP3, Parquet, etc. With simple commands like `processed_dataset = dataset.map(process_example)`, efficiently prepare the dataset for inspection and ML model evaluation and training.
|
| 188 |
+
|
| 189 |
+
[🎓 **Documentation**](https://huggingface.co/docs/datasets/) [🔎 **Find a dataset in the Hub**](https://huggingface.co/datasets) [🌟 **Share a dataset on the Hub**](https://huggingface.co/docs/datasets/share)
|
| 190 |
+
|
| 191 |
+
<h3 align="center">
|
| 192 |
+
<a href="https://hf.co/course"><img src="https://raw.githubusercontent.com/huggingface/datasets/main/docs/source/imgs/course_banner.png"></a>
|
| 193 |
+
</h3>
|
| 194 |
+
|
| 195 |
+
🤗 Datasets is designed to let the community easily add and share new datasets.
|
| 196 |
+
|
| 197 |
+
🤗 Datasets has many additional interesting features:
|
| 198 |
+
|
| 199 |
+
- Thrive on large datasets: 🤗 Datasets naturally frees the user from RAM memory limitation, all datasets are memory-mapped using an efficient zero-serialization cost backend (Apache Arrow).
|
| 200 |
+
- Smart caching: never wait for your data to process several times.
|
| 201 |
+
- Lightweight and fast with a transparent and pythonic API (multi-processing/caching/memory-mapping).
|
| 202 |
+
- Built-in interoperability with NumPy, pandas, PyTorch, Tensorflow 2 and JAX.
|
| 203 |
+
- Native support for audio and image data
|
| 204 |
+
- Enable streaming mode to save disk space and start iterating over the dataset immediately.
|
| 205 |
+
|
| 206 |
+
🤗 Datasets originated from a fork of the awesome [TensorFlow Datasets](https://github.com/tensorflow/datasets) and the HuggingFace team want to deeply thank the TensorFlow Datasets team for building this amazing library. More details on the differences between 🤗 Datasets and `tfds` can be found in the section [Main differences between 🤗 Datasets and `tfds`](#main-differences-between--datasets-and-tfds).
|
| 207 |
+
|
| 208 |
+
# Installation
|
| 209 |
+
|
| 210 |
+
## With pip
|
| 211 |
+
|
| 212 |
+
🤗 Datasets can be installed from PyPi and has to be installed in a virtual environment (venv or conda for instance)
|
| 213 |
+
|
| 214 |
+
```bash
|
| 215 |
+
pip install datasets
|
| 216 |
+
```
|
| 217 |
+
|
| 218 |
+
## With conda
|
| 219 |
+
|
| 220 |
+
🤗 Datasets can be installed using conda as follows:
|
| 221 |
+
|
| 222 |
+
```bash
|
| 223 |
+
conda install -c huggingface -c conda-forge datasets
|
| 224 |
+
```
|
| 225 |
+
|
| 226 |
+
Follow the installation pages of TensorFlow and PyTorch to see how to install them with conda.
|
| 227 |
+
|
| 228 |
+
For more details on installation, check the installation page in the documentation: https://huggingface.co/docs/datasets/installation
|
| 229 |
+
|
| 230 |
+
## Installation to use with PyTorch/TensorFlow/pandas
|
| 231 |
+
|
| 232 |
+
If you plan to use 🤗 Datasets with PyTorch (1.0+), TensorFlow (2.2+) or pandas, you should also install PyTorch, TensorFlow or pandas.
|
| 233 |
+
|
| 234 |
+
For more details on using the library with NumPy, pandas, PyTorch or TensorFlow, check the quick start page in the documentation: https://huggingface.co/docs/datasets/quickstart
|
| 235 |
+
|
| 236 |
+
# Usage
|
| 237 |
+
|
| 238 |
+
🤗 Datasets is made to be very simple to use - the API is centered around a single function, `datasets.load_dataset(dataset_name, **kwargs)`, that instantiates a dataset.
|
| 239 |
+
|
| 240 |
+
This library can be used for text/image/audio/etc. datasets. Here is an example to load a text dataset:
|
| 241 |
+
|
| 242 |
+
Here is a quick example:
|
| 243 |
+
|
| 244 |
+
```python
|
| 245 |
+
from datasets import load_dataset
|
| 246 |
+
|
| 247 |
+
# Print all the available datasets
|
| 248 |
+
from huggingface_hub import list_datasets
|
| 249 |
+
print([dataset.id for dataset in list_datasets()])
|
| 250 |
+
|
| 251 |
+
# Load a dataset and print the first example in the training set
|
| 252 |
+
squad_dataset = load_dataset('squad')
|
| 253 |
+
print(squad_dataset['train'][0])
|
| 254 |
+
|
| 255 |
+
# Process the dataset - add a column with the length of the context texts
|
| 256 |
+
dataset_with_length = squad_dataset.map(lambda x: {"length": len(x["context"])})
|
| 257 |
+
|
| 258 |
+
# Process the dataset - tokenize the context texts (using a tokenizer from the 🤗 Transformers library)
|
| 259 |
+
from transformers import AutoTokenizer
|
| 260 |
+
tokenizer = AutoTokenizer.from_pretrained('bert-base-cased')
|
| 261 |
+
|
| 262 |
+
tokenized_dataset = squad_dataset.map(lambda x: tokenizer(x['context']), batched=True)
|
| 263 |
+
```
|
| 264 |
+
|
| 265 |
+
If your dataset is bigger than your disk or if you don't want to wait to download the data, you can use streaming:
|
| 266 |
+
|
| 267 |
+
```python
|
| 268 |
+
# If you want to use the dataset immediately and efficiently stream the data as you iterate over the dataset
|
| 269 |
+
image_dataset = load_dataset('cifar100', streaming=True)
|
| 270 |
+
for example in image_dataset["train"]:
|
| 271 |
+
break
|
| 272 |
+
```
|
| 273 |
+
|
| 274 |
+
For more details on using the library, check the quick start page in the documentation: https://huggingface.co/docs/datasets/quickstart and the specific pages on:
|
| 275 |
+
|
| 276 |
+
- Loading a dataset: https://huggingface.co/docs/datasets/loading
|
| 277 |
+
- What's in a Dataset: https://huggingface.co/docs/datasets/access
|
| 278 |
+
- Processing data with 🤗 Datasets: https://huggingface.co/docs/datasets/process
|
| 279 |
+
- Processing audio data: https://huggingface.co/docs/datasets/audio_process
|
| 280 |
+
- Processing image data: https://huggingface.co/docs/datasets/image_process
|
| 281 |
+
- Processing text data: https://huggingface.co/docs/datasets/nlp_process
|
| 282 |
+
- Streaming a dataset: https://huggingface.co/docs/datasets/stream
|
| 283 |
+
- Writing your own dataset loading script: https://huggingface.co/docs/datasets/dataset_script
|
| 284 |
+
- etc.
|
| 285 |
+
|
| 286 |
+
# Add a new dataset to the Hub
|
| 287 |
+
|
| 288 |
+
We have a very detailed step-by-step guide to add a new dataset to the  datasets already provided on the [HuggingFace Datasets Hub](https://huggingface.co/datasets).
|
| 289 |
+
|
| 290 |
+
You can find:
|
| 291 |
+
- [how to upload a dataset to the Hub using your web browser or Python](https://huggingface.co/docs/datasets/upload_dataset) and also
|
| 292 |
+
- [how to upload it using Git](https://huggingface.co/docs/datasets/share).
|
| 293 |
+
|
| 294 |
+
# Main differences between 🤗 Datasets and `tfds`
|
| 295 |
+
|
| 296 |
+
If you are familiar with the great TensorFlow Datasets, here are the main differences between 🤗 Datasets and `tfds`:
|
| 297 |
+
|
| 298 |
+
- the scripts in 🤗 Datasets are not provided within the library but are queried, downloaded/cached and dynamically loaded upon request
|
| 299 |
+
- the backend serialization of 🤗 Datasets is based on [Apache Arrow](https://arrow.apache.org/) instead of TF Records and leverage python dataclasses for info and features with some diverging features (we mostly don't do encoding and store the raw data as much as possible in the backend serialization cache).
|
| 300 |
+
- the user-facing dataset object of 🤗 Datasets is not a `tf.data.Dataset` but a built-in framework-agnostic dataset class with methods inspired by what we like in `tf.data` (like a `map()` method). It basically wraps a memory-mapped Arrow table cache.
|
| 301 |
+
|
| 302 |
+
# Disclaimers
|
| 303 |
+
|
| 304 |
+
🤗 Datasets may run Python code defined by the dataset authors to parse certain data formats or structures. For security reasons, we ask users to:
|
| 305 |
+
- check the dataset scripts they're going to run beforehand and
|
| 306 |
+
- pin the `revision` of the repositories they use.
|
| 307 |
+
|
| 308 |
+
If you're a dataset owner and wish to update any part of it (description, citation, license, etc.), or do not want your dataset to be included in the Hugging Face Hub, please get in touch by opening a discussion or a pull request in the Community tab of the dataset page. Thanks for your contribution to the ML community!
|
| 309 |
+
|
| 310 |
+
## BibTeX
|
| 311 |
+
|
| 312 |
+
If you want to cite our 🤗 Datasets library, you can use our [paper](https://arxiv.org/abs/2109.02846):
|
| 313 |
+
|
| 314 |
+
```bibtex
|
| 315 |
+
@inproceedings{lhoest-etal-2021-datasets,
|
| 316 |
+
title = "Datasets: A Community Library for Natural Language Processing",
|
| 317 |
+
author = "Lhoest, Quentin and
|
| 318 |
+
Villanova del Moral, Albert and
|
| 319 |
+
Jernite, Yacine and
|
| 320 |
+
Thakur, Abhishek and
|
| 321 |
+
von Platen, Patrick and
|
| 322 |
+
Patil, Suraj and
|
| 323 |
+
Chaumond, Julien and
|
| 324 |
+
Drame, Mariama and
|
| 325 |
+
Plu, Julien and
|
| 326 |
+
Tunstall, Lewis and
|
| 327 |
+
Davison, Joe and
|
| 328 |
+
{\v{S}}a{\v{s}}ko, Mario and
|
| 329 |
+
Chhablani, Gunjan and
|
| 330 |
+
Malik, Bhavitvya and
|
| 331 |
+
Brandeis, Simon and
|
| 332 |
+
Le Scao, Teven and
|
| 333 |
+
Sanh, Victor and
|
| 334 |
+
Xu, Canwen and
|
| 335 |
+
Patry, Nicolas and
|
| 336 |
+
McMillan-Major, Angelina and
|
| 337 |
+
Schmid, Philipp and
|
| 338 |
+
Gugger, Sylvain and
|
| 339 |
+
Delangue, Cl{\'e}ment and
|
| 340 |
+
Matussi{\`e}re, Th{\'e}o and
|
| 341 |
+
Debut, Lysandre and
|
| 342 |
+
Bekman, Stas and
|
| 343 |
+
Cistac, Pierric and
|
| 344 |
+
Goehringer, Thibault and
|
| 345 |
+
Mustar, Victor and
|
| 346 |
+
Lagunas, Fran{\c{c}}ois and
|
| 347 |
+
Rush, Alexander and
|
| 348 |
+
Wolf, Thomas",
|
| 349 |
+
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: System Demonstrations",
|
| 350 |
+
month = nov,
|
| 351 |
+
year = "2021",
|
| 352 |
+
address = "Online and Punta Cana, Dominican Republic",
|
| 353 |
+
publisher = "Association for Computational Linguistics",
|
| 354 |
+
url = "https://aclanthology.org/2021.emnlp-demo.21",
|
| 355 |
+
pages = "175--184",
|
| 356 |
+
abstract = "The scale, variety, and quantity of publicly-available NLP datasets has grown rapidly as researchers propose new tasks, larger models, and novel benchmarks. Datasets is a community library for contemporary NLP designed to support this ecosystem. Datasets aims to standardize end-user interfaces, versioning, and documentation, while providing a lightweight front-end that behaves similarly for small datasets as for internet-scale corpora. The design of the library incorporates a distributed, community-driven approach to adding datasets and documenting usage. After a year of development, the library now includes more than 650 unique datasets, has more than 250 contributors, and has helped support a variety of novel cross-dataset research projects and shared tasks. The library is available at https://github.com/huggingface/datasets.",
|
| 357 |
+
eprint={2109.02846},
|
| 358 |
+
archivePrefix={arXiv},
|
| 359 |
+
primaryClass={cs.CL},
|
| 360 |
+
}
|
| 361 |
+
```
|
| 362 |
+
|
| 363 |
+
If you need to cite a specific version of our 🤗 Datasets library for reproducibility, you can use the corresponding version Zenodo DOI from this [list](https://zenodo.org/search?q=conceptrecid:%224817768%22&sort=-version&all_versions=True).
|
| 364 |
+
|
| 365 |
+
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/RECORD
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/datasets-cli,sha256=7oI9mamYfuMzQ3NO2AzGid32CALPd9iay3ymjzDjrAQ,341
|
| 2 |
+
datasets-2.14.4.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
|
| 3 |
+
datasets-2.14.4.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
|
| 4 |
+
datasets-2.14.4.dist-info/METADATA,sha256=k5iMnqWKNpq2gZ3eII7zOvUvAAj3BQZtdTXBiM3QfOY,19796
|
| 5 |
+
datasets-2.14.4.dist-info/RECORD,,
|
| 6 |
+
datasets-2.14.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 7 |
+
datasets-2.14.4.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
|
| 8 |
+
datasets-2.14.4.dist-info/entry_points.txt,sha256=vhdg1JXUleCZtwvozP5q5iHqRpSETfyhSDJ39zW3KUA,70
|
| 9 |
+
datasets-2.14.4.dist-info/top_level.txt,sha256=9A857YvCQm_Dg3UjeKkWPz9sDBos0t3zN2pf5krTemQ,9
|
| 10 |
+
datasets/__init__.py,sha256=nfpQY_LXXAkeuTywDWXA6rnFNnb_8bYGsnnqSwwzcKA,2550
|
| 11 |
+
datasets/arrow_dataset.py,sha256=d83jcz_a5I3gHQ-czWb_GwjS3BZDieYPwXvBfkaJnsQ,288717
|
| 12 |
+
datasets/arrow_reader.py,sha256=4zgsJiqRoNU18eAjRGM4CT5cY06WYc6Ikbfa9dVfjTA,26920
|
| 13 |
+
datasets/arrow_writer.py,sha256=Ma0AmUbJdOcnc9Dcu-NW5xKLD1MuwsuAsKbmyqHzer4,33414
|
| 14 |
+
datasets/builder.bak.py,sha256=YZYHkGfXIFeM878CLLM0YvyrY6gLw3_z_MEh-QnCybE,111808
|
| 15 |
+
datasets/builder.py,sha256=O_OG6bd87ld6mEZRDZOKXL0ZlWxPFKF_VpiP7ehLK9k,106717
|
| 16 |
+
datasets/combine.py,sha256=OvMg-5A_cBraHyEXbNTTrWjd9sbUiyA7PG6aBJpbg5Q,10924
|
| 17 |
+
datasets/commands/__init__.py,sha256=rujbQtxJbwHhF9WQqp2DD9tfVTghDMJdl0v6H551Pcs,312
|
| 18 |
+
datasets/commands/convert.py,sha256=-VOqHh0ySkIOfEYmR7HVs7PzouVrkVShqyUtNGcNCYU,7914
|
| 19 |
+
datasets/commands/datasets_cli.py,sha256=mMYGiIYoE9kcZzcWvPDPuT2fEKlhL2hHN9RWgivQu2I,1381
|
| 20 |
+
datasets/commands/dummy_data.py,sha256=rBVQAN1wd9fvldw79PVoL3vNZdqosjO_PPO_SFEYUqw,23106
|
| 21 |
+
datasets/commands/env.py,sha256=U5IPHmBXb05dgZsqsbRLm9Lw8Ni2F571QdgIY13xpro,1171
|
| 22 |
+
datasets/commands/run_beam.py,sha256=Cl6zWXA00C9PjgQyMv_E7SSMN2539no26OLFeznJxYM,6812
|
| 23 |
+
datasets/commands/test.py,sha256=U_Rqs78CLVDrwOamol_RZJ7xCe6TcCvHcMlpeW0Mwgk,8506
|
| 24 |
+
datasets/config.py,sha256=_R1RzaTnDJhWYnch49PaPi_Wv1hiJSN9gpsorYhJvOc,8542
|
| 25 |
+
datasets/data_files.py,sha256=8mq6SYcc2mr7lyjQltypcd_cTVgnViGkomCALysr2aM,27518
|
| 26 |
+
datasets/dataset_dict.py,sha256=rVm7VjTWbzsNibEIk7Xm8wXfcB61TWfTPCFpLbuCnVg,100017
|
| 27 |
+
datasets/distributed.py,sha256=jZ31II0mmlPMhZbEtbAsX6jlK0U69qdpV3uS5U5JFYw,1560
|
| 28 |
+
datasets/download/__init__.py,sha256=lbFOtITDaR7PHrhzJ8VfRnpaOT6NYozSxUcLv_GVfTg,281
|
| 29 |
+
datasets/download/download_config.py,sha256=OBsZBXFKphFysU0eocStWryF3QGFY-9A96RCtxxIi0I,4770
|
| 30 |
+
datasets/download/download_manager.py,sha256=Hk6rnzoenY8K4ZW64sBqGTsDRQSupM_7brWEOa1AdLo,22040
|
| 31 |
+
datasets/download/mock_download_manager.py,sha256=nrisuioyg1ZyV0424fYatCEdOeFD1RR_3sNeSmQKL-o,10445
|
| 32 |
+
datasets/download/streaming_download_manager.py,sha256=HRCBNRimvMj2S364NJHK-dJHQmgYwsxZ-lMnJzLanYQ,43900
|
| 33 |
+
datasets/features/__init__.py,sha256=05nCoWgkpLzZ4xhsNY-uDEeUsTaPcSjbR6pK1RyYswk,447
|
| 34 |
+
datasets/features/audio.py,sha256=ISK9LOnkpggoLulSdsUrNl992zEszf4cL2MbG1Nuu0c,12319
|
| 35 |
+
datasets/features/features.py,sha256=I3lbZRkebE3haHj-m5DNtW2cYysMi_HqXWVrA_ffjrA,86614
|
| 36 |
+
datasets/features/image.py,sha256=p94zl-2pLmnF3ZKoH6yEpfIFAaFZgjkYaSB_jNH4InU,15135
|
| 37 |
+
datasets/features/translation.py,sha256=9tPj27uvGSXBFrCL0JIkm3Do6GcSw_0NngCVbHXVBK4,4373
|
| 38 |
+
datasets/filesystems/__init__.py,sha256=NfkLTMjbvhS2kAbJcclymeafIzsXucBu85zb8N8bxXA,2642
|
| 39 |
+
datasets/filesystems/compression.py,sha256=Rl_E9w_OsERYgIGOVePLHI5k1mcU1cIR8wFkjW8N5cM,6100
|
| 40 |
+
datasets/filesystems/s3filesystem.py,sha256=KowTCvTSsrdAU4syiaRffNw4g25-DTbjsoXBIMWz2tk,5725
|
| 41 |
+
datasets/fingerprint.py,sha256=2T5pFJ0aNOYWN_YqArtZT4FpYfpv-lNllqWRirCVLJE,23309
|
| 42 |
+
datasets/formatting/__init__.py,sha256=3oQaTX0DeV03KNYmzSuSUgxUfjDyrBsDt5e0iqJv4LU,5161
|
| 43 |
+
datasets/formatting/formatting.py,sha256=JwSmaFHJGf4Xa1EJtppL6y59K4_dDXfWoPOxSG50HmI,25742
|
| 44 |
+
datasets/formatting/jax_formatter.py,sha256=KoTbq0XSUQ1Rp3G5IzN3cU192JZ9t5HAZtHiVpHPbB4,6839
|
| 45 |
+
datasets/formatting/np_formatter.py,sha256=DJBnt3oF0fHWJCqe4j6o9BOupZ0uGrw_xxFfsGBVoyk,4525
|
| 46 |
+
datasets/formatting/tf_formatter.py,sha256=QRzeq8f1ALa6961PBNFRTH3RT4S-_8soqfUl9a7F89I,4657
|
| 47 |
+
datasets/formatting/torch_formatter.py,sha256=qbETKRaNFh5WNddjENvX6gEOYyf11ieC9sN9E75kMIQ,4252
|
| 48 |
+
datasets/info.py,sha256=vGglh2DX5D92f3dH6R7dv4qfHtpa6VNx8yrUnaumhnQ,26844
|
| 49 |
+
datasets/inspect.py,sha256=JTDJhmAHT8OXVpNEe5ztHZQTMjCT3zJRAbSNtcC7NcI,23495
|
| 50 |
+
datasets/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 51 |
+
datasets/io/abc.py,sha256=LwDMXYs6YkhZuz1JiMK4PDIqgNjv7I8xH3UMUELW2ys,1672
|
| 52 |
+
datasets/io/csv.py,sha256=MSCfytHfuPEEgsbfHjC21BbfdwakU7lDyO2pxGcI0_I,5294
|
| 53 |
+
datasets/io/generator.py,sha256=79KzzwIBNtofseIT4Ys3Oec0rB9MrDjALZVNM6RU12o,1839
|
| 54 |
+
datasets/io/json.py,sha256=i0kJEl2YJYjMX-yZ33e-uCgy4cTCU7cGMjedoNuOuBM,6320
|
| 55 |
+
datasets/io/parquet.py,sha256=JJ3rim_i3upHtBfjAie1dtkq5PIP-5daygTDzmtEYsw,5833
|
| 56 |
+
datasets/io/spark.py,sha256=VUIODLHgIbiK0CI0UvthQ_gUO0MQDtHUozvw7Dfs8FI,1797
|
| 57 |
+
datasets/io/sql.py,sha256=scwrohCDYyYwpvKZ83fhSxJZtutHHiEY0CDmYSaGR-w,4413
|
| 58 |
+
datasets/io/text.py,sha256=5XboSqdtjRNBfkgi8hFjKrp1pp6hwiaiQJqwVMvGvX0,2026
|
| 59 |
+
datasets/iterable_dataset.py,sha256=GHJGsGyGrT-vrIqMKSRKJtejXfcglHP_PURq1COQz0I,108763
|
| 60 |
+
datasets/keyhash.py,sha256=SvEYj4Z8jnJq_2_Iwe2CSwW_02mKFA3LgcbC42NUOhQ,3826
|
| 61 |
+
datasets/load.py,sha256=tDy-2YVvgKbbRdQx_7lyUgNFbhAsfMnv42Br3xAXEOs,103111
|
| 62 |
+
datasets/metric.py,sha256=CK4B7UtylfXmxisk_rRd8GfaGKaKI1SZwaKpQWq4EoM,27992
|
| 63 |
+
datasets/naming.py,sha256=QIjMryT3PM3A-IQq_7yVECGHSMSzWPN7wTA5g99M1Fc,3000
|
| 64 |
+
datasets/packaged_modules/__init__.py,sha256=WRXAm_CFLu5VSfqsQhg434zl8RffpWrDs6H4r2vRq28,2681
|
| 65 |
+
datasets/packaged_modules/arrow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 66 |
+
datasets/packaged_modules/arrow/arrow.py,sha256=MK8wKbYUErZqeCoq9ZDVMhN5DOX94k5wGMgsM-ONstY,3317
|
| 67 |
+
datasets/packaged_modules/audiofolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 68 |
+
datasets/packaged_modules/audiofolder/audiofolder.py,sha256=BXRlK57KvYdyEo-L-Qs6qtrG2tL0QUF0cmJvl6L1N-w,1633
|
| 69 |
+
datasets/packaged_modules/csv/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 70 |
+
datasets/packaged_modules/csv/csv.py,sha256=0fF5XJqcRWwZ8FqNS16H8bOkEth6FZcsIBZokhEPMRc,8388
|
| 71 |
+
datasets/packaged_modules/folder_based_builder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 72 |
+
datasets/packaged_modules/folder_based_builder/folder_based_builder.py,sha256=a6uVsxHmitdhicNyX2ZZOxJlGNtJIHHrC43poFs3tzo,22126
|
| 73 |
+
datasets/packaged_modules/generator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 74 |
+
datasets/packaged_modules/generator/generator.py,sha256=QZKrNB3ztWPXT_H5OFOl1CBlAlAeckW48kdyySyVVKw,928
|
| 75 |
+
datasets/packaged_modules/imagefolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 76 |
+
datasets/packaged_modules/imagefolder/imagefolder.py,sha256=SYu6yxe4iBZzclT7u3m0gaACa6udSi1YOfFSy7dzdwk,1975
|
| 77 |
+
datasets/packaged_modules/json/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 78 |
+
datasets/packaged_modules/json/json.py,sha256=Jei-FP1xNb3ysUgZQ6Mf_426sXrgZrUjP8q4xuzzWWo,9331
|
| 79 |
+
datasets/packaged_modules/pandas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 80 |
+
datasets/packaged_modules/pandas/pandas.py,sha256=TRgC7LkwmUq0dThVuMVblX8wlyY4lvTExSIvzyUyV1w,2302
|
| 81 |
+
datasets/packaged_modules/parquet/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 82 |
+
datasets/packaged_modules/parquet/parquet.py,sha256=sp1vOPwt64pBWEUIpim-BwbkuDCvnlLFIAxvnC64Il0,4272
|
| 83 |
+
datasets/packaged_modules/spark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 84 |
+
datasets/packaged_modules/spark/spark.py,sha256=7z8KuKSRVxvmdNekgAVWC5ULP3OFR-iUdXhhkLOF-kU,13916
|
| 85 |
+
datasets/packaged_modules/sql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 86 |
+
datasets/packaged_modules/sql/sql.py,sha256=Fcnok2-1uX2XnQah4BrtE5SPli6O3JKb9tzMy1lachk,4482
|
| 87 |
+
datasets/packaged_modules/text/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 88 |
+
datasets/packaged_modules/text/text.py,sha256=RXoZlE1Go08KXgo4RPX1GW0ads1a-6iz1QRi1c66OZg,6260
|
| 89 |
+
datasets/parallel/__init__.py,sha256=dEhpBOLbCcsKClTXYJnJRp-ZtrfUV6jsH-CYqviXl-E,89
|
| 90 |
+
datasets/parallel/parallel.py,sha256=POcwLCtYmusW6vpop_UrQYP7OInhOSY978PP0ZOVimM,4553
|
| 91 |
+
datasets/search.py,sha256=1yZmjb0t6HzKJBvjxc3s7aCZPvKB9hpc0xEhYL43riY,35414
|
| 92 |
+
datasets/splits.py,sha256=DjABqLPeUGm2WTFQgEChL1b8Kc0IYC60d59oT9LDdOI,23340
|
| 93 |
+
datasets/streaming.py,sha256=NBgbP5Cycgut_p2uGn3dloyV3n1AXGzUkG-7l2NGskg,6145
|
| 94 |
+
datasets/table.py,sha256=gf5M16_9gzcP4GNiFeSvO2YLw7EcIuofs2VsPHQ_7g8,97004
|
| 95 |
+
datasets/tasks/__init__.py,sha256=Rz2GmKCOHt29M4DDks4mmz-PrVokZ4MGvMhXD29QZFU,1615
|
| 96 |
+
datasets/tasks/audio_classificiation.py,sha256=fkR37qfJfJRPgCizf9iDV-dBnsGmLo2V0w8JpMwyX0M,1297
|
| 97 |
+
datasets/tasks/automatic_speech_recognition.py,sha256=zbTTsLX5N-_Da5oucuk6zBZhDdhD4N5_rzsni9lT_vo,1309
|
| 98 |
+
datasets/tasks/base.py,sha256=SlYEeDS87jruZNNkDRgz-U4q7EUijePL-RTN14ngwsk,1095
|
| 99 |
+
datasets/tasks/image_classification.py,sha256=llF5_koN5APq7cF_WlGy5c9hAVspRlYCprXgwAa7kCc,1297
|
| 100 |
+
datasets/tasks/language_modeling.py,sha256=Vdor-TdCGdiMpaIPZr0fRvgNrt5_D-1JElXKGbfQhvI,581
|
| 101 |
+
datasets/tasks/question_answering.py,sha256=z8a80QRTsouUuIYVKQRDMTxOGeSK1QMycyDHxUW42zg,1105
|
| 102 |
+
datasets/tasks/summarization.py,sha256=adrpmvgfAjXCyDRdZnZ52h0FKql5-EWU61Z2-v6rN-w,772
|
| 103 |
+
datasets/tasks/text_classification.py,sha256=KvlddXxnnzzjCjJmyY3Z-e1G4dpTN0UXqlmZ1L0LrjU,1403
|
| 104 |
+
datasets/utils/__init__.py,sha256=1nEhDcerD-WSbA_W_6fR_CIoLTCO0M0rpgkSQJr04h0,1045
|
| 105 |
+
datasets/utils/beam_utils.py,sha256=DvA0ZVrx4-T9iHpB9VpduKn435p4rFaJw0Ua5cKmpeI,2029
|
| 106 |
+
datasets/utils/cache.py,sha256=ouFjySURlby2H9KqJLfpRBM8H1Fwiuo3LBlfZAB-OPo,10557
|
| 107 |
+
datasets/utils/deprecation_utils.py,sha256=hTHwlzRs92NfNVudH71LMpW70sjbsP5amebrIgi3A-U,3452
|
| 108 |
+
datasets/utils/doc_utils.py,sha256=HoSm0TFaQaCYGfDgNhpBJ4Xc2WQZuOD6dTxLd9D87fs,407
|
| 109 |
+
datasets/utils/download_manager.py,sha256=reLDKIa72LGFuw1Xj6uFcxeIa1yFlO3-MpGXTUPp0to,60
|
| 110 |
+
datasets/utils/experimental.py,sha256=hsTzoXR2lnVpOlRIsgrSTS0iiUhAJAwl7d2xN04N3hc,1096
|
| 111 |
+
datasets/utils/extract.py,sha256=hPoC4kMD8iaAaCDr61ySlYeB7VMGZXZh2ka4zV0EPYM,14194
|
| 112 |
+
datasets/utils/file_utils.py,sha256=JwwPIYCOU9C9ZDtLOeSqVyKwlcmBTbmubFdKeAnPp6k,26095
|
| 113 |
+
datasets/utils/filelock.py,sha256=JDUXzLFPepiHOGUeVw2D6qdUC3ruUT7dijTSTtsievI,13813
|
| 114 |
+
datasets/utils/hub.py,sha256=q2hpEIjPPgzRiTk1m-hTQdHITDq5PiHWjOM1cIN5wDw,452
|
| 115 |
+
datasets/utils/info_utils.py,sha256=ufjCalrf3xyEGEtcTmA53dZjGfoBIRMBRRE3FsZkUBA,5008
|
| 116 |
+
datasets/utils/logging.py,sha256=sBv8RTJP6LAac7R8gOQKblEgTe9x40YmMNOho3ueW0o,7004
|
| 117 |
+
datasets/utils/metadata.py,sha256=L_T6ZAHE_XjHlAFuxfGYsMz1gXYgXPlWOMuL6TBHy2w,10432
|
| 118 |
+
datasets/utils/patching.py,sha256=WMNz-rvovRBkPPJZNyHXRsuH-WA5H4Gg29ocMnBmqSA,4957
|
| 119 |
+
datasets/utils/py_utils.py,sha256=nsWyfkF9tKND3YaKudHytxSq1SBUIu_qGppSLmZ_Ze0,56737
|
| 120 |
+
datasets/utils/readme.py,sha256=JFlaLMCGrIz0nQCdnYKUZk5d9D9DErEYfjtRrX9VzIw,12627
|
| 121 |
+
datasets/utils/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 122 |
+
datasets/utils/resources/creators.json,sha256=XtIpMZefgBOdTevRrQTkFiufbgCbp_iyxseyphYQkn0,257
|
| 123 |
+
datasets/utils/resources/languages.json,sha256=Z0rQNPsfje8zMi8KdvvwxF4APwwqcskJFUvhNiLAgPM,199138
|
| 124 |
+
datasets/utils/resources/multilingualities.json,sha256=02Uc8RtRzfl13l98Y_alZm5HuMYwPzL78B0S5a1X-8c,205
|
| 125 |
+
datasets/utils/resources/readme_structure.yaml,sha256=hNf9msoBZw5jfakQrDb0Af8T325TXdcaHsAO2MUcZvY,3877
|
| 126 |
+
datasets/utils/resources/size_categories.json,sha256=_5nAP7z8R6t7_GfER81QudFO6Y1tqYu4AWrr4Aot8S8,171
|
| 127 |
+
datasets/utils/sharding.py,sha256=FDi895opKH7XkpfIu-ag9PqBQo2PGx0tSO3Dg-gDAAs,4288
|
| 128 |
+
datasets/utils/stratify.py,sha256=uMwuCDRbW342vy-lXDHs6IQusOr7c9nOG3PpnWyzJO4,4091
|
| 129 |
+
datasets/utils/tf_utils.py,sha256=YWmXP525b-kp1A-pnF-rGEOKqmg0Dv5j8RXJieSbkZc,25044
|
| 130 |
+
datasets/utils/typing.py,sha256=LznosIqUzjXgwbRLAGCv4_7-yZo7muYY42Y3495oz5I,224
|
| 131 |
+
datasets/utils/version.py,sha256=Z82cHpjTbQVJyWgnwSU8DsW2G0y-sSbSoOVeQrAds9k,3281
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/REQUESTED
ADDED
|
File without changes
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.36.2)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
datasets-cli = datasets.commands.datasets_cli:main
|
| 3 |
+
|
lib/python3.10/site-packages/datasets-2.14.4.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
datasets
|
lib/python3.10/site-packages/datasets/formatting/__init__.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Datasets Authors and the TensorFlow Datasets Authors.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
# flake8: noqa
|
| 16 |
+
# Lint as: python3
|
| 17 |
+
|
| 18 |
+
from typing import Dict, List, Optional, Type
|
| 19 |
+
|
| 20 |
+
from .. import config
|
| 21 |
+
from ..utils import logging
|
| 22 |
+
from .formatting import (
|
| 23 |
+
ArrowFormatter,
|
| 24 |
+
CustomFormatter,
|
| 25 |
+
Formatter,
|
| 26 |
+
PandasFormatter,
|
| 27 |
+
PythonFormatter,
|
| 28 |
+
TensorFormatter,
|
| 29 |
+
format_table,
|
| 30 |
+
query_table,
|
| 31 |
+
)
|
| 32 |
+
from .np_formatter import NumpyFormatter
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
logger = logging.get_logger(__name__)

# Registry mapping a format type name (or None for the default python format) to its Formatter class.
_FORMAT_TYPES: Dict[Optional[str], Type[Formatter]] = {}
# Registry mapping an alias (e.g. "np") to its canonical format type name (e.g. "numpy").
_FORMAT_TYPES_ALIASES: Dict[Optional[str], str] = {}
# Registry mapping an alias to the error to raise when the corresponding backend is not installed.
_FORMAT_TYPES_ALIASES_UNAVAILABLE: Dict[Optional[str], Exception] = {}
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _register_formatter(
    formatter_cls: type,
    format_type: Optional[str],
    aliases: Optional[List[str]] = None,
):
    """Register a Formatter class under `format_type` and all of its `aliases`.

    The format type itself is also registered as an alias of itself, so alias
    resolution always succeeds for canonical names. Re-registering an existing
    format type or alias only emits a warning (it does not raise).
    """
    names = {format_type, *(aliases or [])}
    if format_type in _FORMAT_TYPES:
        previous_cls = _FORMAT_TYPES[format_type].__name__
        logger.warning(f"Overwriting format type '{format_type}' ({previous_cls} -> {formatter_cls.__name__})")
    _FORMAT_TYPES[format_type] = formatter_cls
    for name in names:
        if name in _FORMAT_TYPES_ALIASES:
            logger.warning(
                f"Overwriting format type alias '{name}' ({_FORMAT_TYPES_ALIASES[name]} -> {format_type})"
            )
        _FORMAT_TYPES_ALIASES[name] = format_type
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _register_unavailable_formatter(
    unavailable_error: Exception, format_type: Optional[str], aliases: Optional[List[str]] = None
):
    """Record `unavailable_error` for `format_type` and all of its `aliases`.

    The stored exception is raised later when a caller requests this format
    type while its backing library is not installed.
    """
    for name in {format_type, *(aliases or [])}:
        _FORMAT_TYPES_ALIASES_UNAVAILABLE[name] = unavailable_error
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
# Here we define all the available formatting functions that can be used by `Dataset.set_format`
# `None` is the default plain-python format; each formatter is also reachable through
# short aliases (e.g. "np" for "numpy", "pd" for "pandas").
_register_formatter(PythonFormatter, None, aliases=["python"])
_register_formatter(ArrowFormatter, "arrow", aliases=["pa", "pyarrow"])
_register_formatter(NumpyFormatter, "numpy", aliases=["np"])
_register_formatter(PandasFormatter, "pandas", aliases=["pd"])
_register_formatter(CustomFormatter, "custom")

# Formatters with optional third-party backends are only imported and registered when the
# backend is installed; otherwise an explanatory error is recorded and raised later by
# `get_formatter` when the format is actually requested.
if config.TORCH_AVAILABLE:
    from .torch_formatter import TorchFormatter

    _register_formatter(TorchFormatter, "torch", aliases=["pt", "pytorch"])
else:
    _torch_error = ValueError("PyTorch needs to be installed to be able to return PyTorch tensors.")
    _register_unavailable_formatter(_torch_error, "torch", aliases=["pt", "pytorch"])

if config.TF_AVAILABLE:
    from .tf_formatter import TFFormatter

    _register_formatter(TFFormatter, "tensorflow", aliases=["tf"])
else:
    _tf_error = ValueError("Tensorflow needs to be installed to be able to return Tensorflow tensors.")
    _register_unavailable_formatter(_tf_error, "tensorflow", aliases=["tf"])

if config.JAX_AVAILABLE:
    from .jax_formatter import JaxFormatter

    _register_formatter(JaxFormatter, "jax", aliases=[])
else:
    _jax_error = ValueError("JAX needs to be installed to be able to return JAX arrays.")
    _register_unavailable_formatter(_jax_error, "jax", aliases=[])
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def get_format_type_from_alias(format_type: Optional[str]) -> Optional[str]:
    """If the given format type is a known alias, then return its main type name. Otherwise return the type with no change."""
    return _FORMAT_TYPES_ALIASES.get(format_type, format_type)
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def get_formatter(format_type: Optional[str], **format_kwargs) -> Formatter:
    """
    Factory function to get a Formatter given its type name and keyword arguments.
    A formatter is an object that extracts and formats data from pyarrow tables.
    It defines the formatting for rows, columns and batches.
    If the formatter for a given type name doesn't exist or is not available, an error is raised.
    """
    format_type = get_format_type_from_alias(format_type)
    if format_type in _FORMAT_TYPES:
        return _FORMAT_TYPES[format_type](**format_kwargs)
    if format_type in _FORMAT_TYPES_ALIASES_UNAVAILABLE:
        # The format type is known but its backing library (torch/tf/jax) is not installed:
        # re-raise the error that was recorded at import time.
        raise _FORMAT_TYPES_ALIASES_UNAVAILABLE[format_type]
    # `None` is the default python format, not a selectable name, so exclude it from the hint.
    # (Also: avoid shadowing the builtin `type` and compare to None with `is not`.)
    available_types = [fmt for fmt in _FORMAT_TYPES if fmt is not None]
    raise ValueError(f"Return type should be None or selected in {available_types}, but got '{format_type}'")
|
lib/python3.10/site-packages/datasets/formatting/formatting.py
ADDED
|
@@ -0,0 +1,649 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Authors.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
from collections.abc import Mapping, MutableMapping
|
| 16 |
+
from functools import partial
|
| 17 |
+
|
| 18 |
+
# Lint as: python3
|
| 19 |
+
from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
|
| 20 |
+
|
| 21 |
+
import numpy as np
|
| 22 |
+
import pandas as pd
|
| 23 |
+
import pyarrow as pa
|
| 24 |
+
from packaging import version
|
| 25 |
+
|
| 26 |
+
from .. import config
|
| 27 |
+
from ..features import Features
|
| 28 |
+
from ..features.features import _ArrayXDExtensionType, _is_zero_copy_only, decode_nested_example, pandas_types_mapper
|
| 29 |
+
from ..table import Table
|
| 30 |
+
from ..utils.py_utils import no_op_if_value_is_null
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
T = TypeVar("T")
|
| 34 |
+
|
| 35 |
+
RowFormat = TypeVar("RowFormat")
|
| 36 |
+
ColumnFormat = TypeVar("ColumnFormat")
|
| 37 |
+
BatchFormat = TypeVar("BatchFormat")
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def _is_range_contiguous(key: range) -> bool:
|
| 41 |
+
return key.step == 1 and key.stop >= key.start
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def _raise_bad_key_type(key: Any):
|
| 45 |
+
raise TypeError(
|
| 46 |
+
f"Wrong key type: '{key}' of type '{type(key)}'. Expected one of int, slice, range, str or Iterable."
|
| 47 |
+
)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _query_table_with_indices_mapping(
    table: Table, key: Union[int, slice, range, str, Iterable], indices: Table
) -> pa.Table:
    """
    Query a pyarrow Table to extract the subtable that corresponds to the given key.
    The :obj:`indices` parameter corresponds to the indices mapping in case we want to take into
    account a shuffling or an indices selection for example.
    The indices table must contain one column named "indices" of type uint64.
    """
    if isinstance(key, int):
        # Negative indices wrap around via the modulo; resolve the position through the
        # indices mapping first, then query the underlying table directly.
        key = indices.fast_slice(key % indices.num_rows, 1).column(0)[0].as_py()
        return _query_table(table, key)
    if isinstance(key, slice):
        # Normalize the slice into an explicit range bounded by the number of indices.
        key = range(*key.indices(indices.num_rows))
    if isinstance(key, range):
        if _is_range_contiguous(key) and key.start >= 0:
            # Contiguous range: take one slice of the indices mapping and resolve it in bulk.
            return _query_table(
                table, [i.as_py() for i in indices.fast_slice(key.start, key.stop - key.start).column(0)]
            )
        else:
            pass  # treat as an iterable
    if isinstance(key, str):
        # Column access: keep only the requested column, still reordered via the mapping.
        table = table.select([key])
        return _query_table(table, indices.column(0).to_pylist())
    if isinstance(key, Iterable):
        # Generic iterable of positions: resolve each one through the mapping individually.
        return _query_table(table, [indices.fast_slice(i, 1).column(0)[0].as_py() for i in key])

    _raise_bad_key_type(key)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def _query_table(table: Table, key: Union[int, slice, range, str, Iterable]) -> pa.Table:
    """
    Query a pyarrow Table to extract the subtable that correspond to the given key.
    """
    if isinstance(key, int):
        # Single row; the modulo makes negative indices wrap around.
        return table.fast_slice(key % table.num_rows, 1)
    if isinstance(key, slice):
        # Normalize the slice into a concrete range over the table.
        key = range(*key.indices(table.num_rows))
    if isinstance(key, range):
        if _is_range_contiguous(key) and key.start >= 0:
            # Contiguous range: a zero-copy slice is enough.
            return table.fast_slice(key.start, key.stop - key.start)
        else:
            pass  # treat as an iterable (handled by the Iterable branch below)
    if isinstance(key, str):
        # Column query: keep only that column.
        return table.table.drop([column for column in table.column_names if column != key])
    if isinstance(key, Iterable):
        key = np.fromiter(key, np.int64)
        if len(key) == 0:
            # Empty selection: return an empty table with the same schema.
            return table.table.slice(0, 0)
        # don't use pyarrow.Table.take even for pyarrow >=1.0 (see https://issues.apache.org/jira/browse/ARROW-9773)
        return table.fast_gather(key % table.num_rows)

    _raise_bad_key_type(key)
def _is_array_with_nulls(pa_array: pa.Array) -> bool:
|
| 106 |
+
return pa_array.null_count > 0
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class BaseArrowExtractor(Generic[RowFormat, ColumnFormat, BatchFormat]):
    """
    Arrow extractor are used to extract data from pyarrow tables.
    It makes it possible to extract rows, columns and batches.
    These three extractions types have to be implemented.
    """

    def extract_row(self, pa_table: pa.Table) -> RowFormat:
        # Extract the first row of the table, in the extractor's row format.
        raise NotImplementedError

    def extract_column(self, pa_table: pa.Table) -> ColumnFormat:
        # Extract the first column of the table, in the extractor's column format.
        raise NotImplementedError

    def extract_batch(self, pa_table: pa.Table) -> BatchFormat:
        # Extract the full table, in the extractor's batch format.
        raise NotImplementedError
def _unnest(py_dict: Dict[str, List[T]]) -> Dict[str, T]:
|
| 127 |
+
"""Return the first element of a batch (dict) as a row (dict)"""
|
| 128 |
+
return {key: array[0] for key, array in py_dict.items()}
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class SimpleArrowExtractor(BaseArrowExtractor[pa.Table, pa.Array, pa.Table]):
    """Extractor that keeps the data in Arrow format (tables and arrays)."""

    def extract_row(self, pa_table: pa.Table) -> pa.Table:
        # A row is represented as a (single-row) pyarrow Table.
        return pa_table

    def extract_column(self, pa_table: pa.Table) -> pa.Array:
        # A column is the first column of the table as a pyarrow (Chunked)Array.
        return pa_table.column(0)

    def extract_batch(self, pa_table: pa.Table) -> pa.Table:
        # A batch is the table itself.
        return pa_table
class PythonArrowExtractor(BaseArrowExtractor[dict, list, dict]):
    """Extractor that converts Arrow data to plain python objects (dicts and lists)."""

    def extract_row(self, pa_table: pa.Table) -> dict:
        # Convert the (single-row) table to a dict of lists, then take the first element of each.
        return _unnest(pa_table.to_pydict())

    def extract_column(self, pa_table: pa.Table) -> list:
        return pa_table.column(0).to_pylist()

    def extract_batch(self, pa_table: pa.Table) -> dict:
        return pa_table.to_pydict()
class NumpyArrowExtractor(BaseArrowExtractor[dict, np.ndarray, dict]):
    """Extractor that converts Arrow data to numpy arrays (rows and batches are dicts of arrays)."""

    def __init__(self, **np_array_kwargs):
        # NOTE(review): these kwargs are stored but not used by _arrow_array_to_numpy below —
        # presumably consumed elsewhere or kept for API compatibility; confirm before removing.
        self.np_array_kwargs = np_array_kwargs

    def extract_row(self, pa_table: pa.Table) -> dict:
        # A row is the first element of each column of the extracted batch.
        return _unnest(self.extract_batch(pa_table))

    def extract_column(self, pa_table: pa.Table) -> np.ndarray:
        return self._arrow_array_to_numpy(pa_table[pa_table.column_names[0]])

    def extract_batch(self, pa_table: pa.Table) -> dict:
        return {col: self._arrow_array_to_numpy(pa_table[col]) for col in pa_table.column_names}

    def _arrow_array_to_numpy(self, pa_array: pa.Array) -> np.ndarray:
        """Convert a pyarrow (Chunked)Array to a numpy array, avoiding copies when possible."""
        if isinstance(pa_array, pa.ChunkedArray):
            if isinstance(pa_array.type, _ArrayXDExtensionType):
                # don't call to_pylist() to preserve dtype of the fixed-size array
                zero_copy_only = _is_zero_copy_only(pa_array.type.storage_dtype, unnest=True)
                array: List = [
                    row for chunk in pa_array.chunks for row in chunk.to_numpy(zero_copy_only=zero_copy_only)
                ]
            else:
                # Zero-copy is only possible when the dtype allows it and no chunk has nulls.
                zero_copy_only = _is_zero_copy_only(pa_array.type) and all(
                    not _is_array_with_nulls(chunk) for chunk in pa_array.chunks
                )
                array: List = [
                    row for chunk in pa_array.chunks for row in chunk.to_numpy(zero_copy_only=zero_copy_only)
                ]
        else:
            if isinstance(pa_array.type, _ArrayXDExtensionType):
                # don't call to_pylist() to preserve dtype of the fixed-size array
                zero_copy_only = _is_zero_copy_only(pa_array.type.storage_dtype, unnest=True)
                array: List = pa_array.to_numpy(zero_copy_only=zero_copy_only)
            else:
                zero_copy_only = _is_zero_copy_only(pa_array.type) and not _is_array_with_nulls(pa_array)
                array: List = pa_array.to_numpy(zero_copy_only=zero_copy_only).tolist()
        if len(array) > 0:
            if any(
                (isinstance(x, np.ndarray) and (x.dtype == object or x.shape != array[0].shape))
                or (isinstance(x, float) and np.isnan(x))
                for x in array
            ):
                # Ragged or object content: fall back to a 1-d object array.
                # np.asarray replaces np.array(..., copy=False), which raises a ValueError
                # under NumPy >= 2.0 whenever a copy is actually required.
                return np.asarray(array, dtype=object)
        return np.asarray(array)
class PandasArrowExtractor(BaseArrowExtractor[pd.DataFrame, pd.Series, pd.DataFrame]):
    """Extractor that converts Arrow data to pandas objects (DataFrames and Series)."""

    def extract_row(self, pa_table: pa.Table) -> pd.DataFrame:
        # A row is represented as a single-row DataFrame.
        return pa_table.slice(length=1).to_pandas(types_mapper=pandas_types_mapper)

    def extract_column(self, pa_table: pa.Table) -> pd.Series:
        # Convert only the first column, then pick it out of the resulting DataFrame.
        return pa_table.select([0]).to_pandas(types_mapper=pandas_types_mapper)[pa_table.column_names[0]]

    def extract_batch(self, pa_table: pa.Table) -> pd.DataFrame:
        return pa_table.to_pandas(types_mapper=pandas_types_mapper)
class PythonFeaturesDecoder:
    """Decode python rows, columns and batches using the dataset's ``Features`` (no-op when features is None)."""

    def __init__(self, features: Optional[Features]):
        self.features = features

    def decode_row(self, row: dict) -> dict:
        if not self.features:
            return row
        return self.features.decode_example(row)

    def decode_column(self, column: list, column_name: str) -> list:
        if not self.features:
            return column
        return self.features.decode_column(column, column_name)

    def decode_batch(self, batch: dict) -> dict:
        if not self.features:
            return batch
        return self.features.decode_batch(batch)
class PandasFeaturesDecoder:
    """Decode pandas rows, columns and batches using the dataset's ``Features`` (no-op when features is None)."""

    def __init__(self, features: Optional[Features]):
        self.features = features

    def decode_row(self, row: pd.DataFrame) -> pd.DataFrame:
        # Build one decode function per column that requires decoding
        # (null values pass through untouched thanks to no_op_if_value_is_null).
        decode = (
            {
                column_name: no_op_if_value_is_null(partial(decode_nested_example, feature))
                for column_name, feature in self.features.items()
                if self.features._column_requires_decoding[column_name]
            }
            if self.features
            else {}
        )
        if decode:
            # Apply the per-column decoders in place on the affected columns only.
            row[list(decode.keys())] = row.transform(decode)
        return row

    def decode_column(self, column: pd.Series, column_name: str) -> pd.Series:
        decode = (
            no_op_if_value_is_null(partial(decode_nested_example, self.features[column_name]))
            if self.features and column_name in self.features and self.features._column_requires_decoding[column_name]
            else None
        )
        if decode:
            column = column.transform(decode)
        return column

    def decode_batch(self, batch: pd.DataFrame) -> pd.DataFrame:
        # A pandas batch is a DataFrame, so it decodes exactly like a row.
        return self.decode_row(batch)
class LazyDict(MutableMapping):
    """A dictionary backed by Arrow data. The values are formatted on-the-fly when accessing the dictionary."""

    def __init__(self, pa_table: pa.Table, formatter: "Formatter"):
        self.pa_table = pa_table
        self.formatter = formatter

        # All values start out unformatted (None); keys_to_format tracks which
        # keys still need to be formatted from the Arrow table.
        self.data = {key: None for key in pa_table.column_names}
        self.keys_to_format = set(self.data.keys())

    def __len__(self):
        return len(self.data)

    def __getitem__(self, key):
        value = self.data[key]
        if key in self.keys_to_format:
            # Format on first access, then cache the formatted value.
            value = self.format(key)
            self.data[key] = value
            self.keys_to_format.remove(key)
        return value

    def __setitem__(self, key, value):
        # An explicitly set value no longer needs formatting.
        if key in self.keys_to_format:
            self.keys_to_format.remove(key)
        self.data[key] = value

    def __delitem__(self, key) -> None:
        if key in self.keys_to_format:
            self.keys_to_format.remove(key)
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    def __contains__(self, key):
        return key in self.data

    def __repr__(self):
        # Formatting everything first makes repr show real values instead of None placeholders.
        self._format_all()
        return repr(self.data)

    if config.PY_VERSION >= version.parse("3.9"):
        # merging with the union ("|") operator is supported in Python 3.9+

        def __or__(self, other):
            if isinstance(other, LazyDict):
                inst = self.copy()
                other = other.copy()
                other._format_all()
                # Keys provided by `other` are now concrete values, so drop them from the lazy set.
                inst.keys_to_format -= other.data.keys()
                inst.data = inst.data | other.data
                return inst
            if isinstance(other, dict):
                inst = self.copy()
                inst.keys_to_format -= other.keys()
                inst.data = inst.data | other
                return inst
            return NotImplemented

        def __ror__(self, other):
            if isinstance(other, LazyDict):
                inst = self.copy()
                other = other.copy()
                other._format_all()
                inst.keys_to_format -= other.data.keys()
                # Reflected operator: `other` is the left operand, so its values come first.
                inst.data = other.data | inst.data
                return inst
            if isinstance(other, dict):
                inst = self.copy()
                inst.keys_to_format -= other.keys()
                inst.data = other | inst.data
                return inst
            return NotImplemented

        def __ior__(self, other):
            if isinstance(other, LazyDict):
                other = other.copy()
                other._format_all()
                self.keys_to_format -= other.data.keys()
                self.data |= other.data
            else:
                self.keys_to_format -= other.keys()
                self.data |= other
            return self

    def __copy__(self):
        # Identical to `UserDict.__copy__`
        inst = self.__class__.__new__(self.__class__)
        inst.__dict__.update(self.__dict__)
        # Create a copy and avoid triggering descriptors
        inst.__dict__["data"] = self.__dict__["data"].copy()
        inst.__dict__["keys_to_format"] = self.__dict__["keys_to_format"].copy()
        return inst

    def copy(self):
        import copy

        return copy.copy(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        # Not supported: a LazyDict must be backed by an Arrow table and a formatter.
        raise NotImplementedError

    def format(self, key):
        # Subclasses define how a single key's value is formatted from the Arrow table.
        raise NotImplementedError

    def _format_all(self):
        # Eagerly format every remaining lazy key (used by __repr__ and the merge operators).
        for key in self.keys_to_format:
            self.data[key] = self.format(key)
        self.keys_to_format.clear()
class LazyRow(LazyDict):
    """Lazy dict over a single-row table: each value is the first element of its formatted column."""

    def format(self, key):
        # Format the one-row column and unwrap the single value at index 0.
        return self.formatter.format_column(self.pa_table.select([key]))[0]
class LazyBatch(LazyDict):
    """Lazy dict over a multi-row table: each value is the whole formatted column."""

    def format(self, key):
        return self.formatter.format_column(self.pa_table.select([key]))
class Formatter(Generic[RowFormat, ColumnFormat, BatchFormat]):
    """
    A formatter is an object that extracts and formats data from pyarrow tables.
    It defines the formatting for rows, columns and batches.
    """

    # Extractor classes shared by all formatters; subclasses instantiate the one they need.
    simple_arrow_extractor = SimpleArrowExtractor
    python_arrow_extractor = PythonArrowExtractor
    numpy_arrow_extractor = NumpyArrowExtractor
    pandas_arrow_extractor = PandasArrowExtractor

    def __init__(self, features: Optional[Features] = None):
        self.features = features
        self.python_features_decoder = PythonFeaturesDecoder(self.features)
        self.pandas_features_decoder = PandasFeaturesDecoder(self.features)

    def __call__(self, pa_table: pa.Table, query_type: str) -> Union[RowFormat, ColumnFormat, BatchFormat]:
        # Dispatch on the query type ("row" | "column" | "batch").
        # NOTE(review): any other value silently returns None — confirm callers always
        # pass a value produced by key_to_query_type.
        if query_type == "row":
            return self.format_row(pa_table)
        elif query_type == "column":
            return self.format_column(pa_table)
        elif query_type == "batch":
            return self.format_batch(pa_table)

    def format_row(self, pa_table: pa.Table) -> RowFormat:
        raise NotImplementedError

    def format_column(self, pa_table: pa.Table) -> ColumnFormat:
        raise NotImplementedError

    def format_batch(self, pa_table: pa.Table) -> BatchFormat:
        raise NotImplementedError
class TensorFormatter(Formatter[RowFormat, ColumnFormat, BatchFormat]):
    """Base class for formatters that convert data into framework-specific tensors (torch, tf, jax, ...)."""

    def recursive_tensorize(self, data_struct: dict):
        # Convert every leaf of a (possibly nested) data structure into a tensor.
        raise NotImplementedError
class ArrowFormatter(Formatter[pa.Table, pa.Array, pa.Table]):
    """Formatter that returns data in Arrow format (no feature decoding is applied)."""

    def format_row(self, pa_table: pa.Table) -> pa.Table:
        return self.simple_arrow_extractor().extract_row(pa_table)

    def format_column(self, pa_table: pa.Table) -> pa.Array:
        return self.simple_arrow_extractor().extract_column(pa_table)

    def format_batch(self, pa_table: pa.Table) -> pa.Table:
        return self.simple_arrow_extractor().extract_batch(pa_table)
class PythonFormatter(Formatter[Mapping, list, Mapping]):
    """Formatter that returns plain python objects (dicts and lists), optionally lazily."""

    def __init__(self, features=None, lazy=False):
        super().__init__(features)
        # When lazy, rows and batches are LazyDicts whose values are formatted on first access.
        self.lazy = lazy

    def format_row(self, pa_table: pa.Table) -> Mapping:
        if self.lazy:
            return LazyRow(pa_table, self)
        row = self.python_arrow_extractor().extract_row(pa_table)
        row = self.python_features_decoder.decode_row(row)
        return row

    def format_column(self, pa_table: pa.Table) -> list:
        column = self.python_arrow_extractor().extract_column(pa_table)
        column = self.python_features_decoder.decode_column(column, pa_table.column_names[0])
        return column

    def format_batch(self, pa_table: pa.Table) -> Mapping:
        if self.lazy:
            return LazyBatch(pa_table, self)
        batch = self.python_arrow_extractor().extract_batch(pa_table)
        batch = self.python_features_decoder.decode_batch(batch)
        return batch
class PandasFormatter(Formatter[pd.DataFrame, pd.Series, pd.DataFrame]):
    """Formatter that returns pandas objects: DataFrames for rows and batches, Series for columns."""

    def format_row(self, pa_table: pa.Table) -> pd.DataFrame:
        return self.pandas_features_decoder.decode_row(self.pandas_arrow_extractor().extract_row(pa_table))

    def format_column(self, pa_table: pa.Table) -> pd.Series:
        return self.pandas_features_decoder.decode_column(
            self.pandas_arrow_extractor().extract_column(pa_table), pa_table.column_names[0]
        )

    def format_batch(self, pa_table: pa.Table) -> pd.DataFrame:
        return self.pandas_features_decoder.decode_batch(self.pandas_arrow_extractor().extract_batch(pa_table))
class CustomFormatter(Formatter[dict, ColumnFormat, dict]):
    """
    A user-defined custom formatter function defined by a ``transform``.
    The transform must take as input a batch of data extracted for an arrow table using the python extractor,
    and return a batch.
    If the output batch is not a dict, then output_all_columns won't work.
    If the output batch has several fields, then querying a single column won't work since we don't know which field
    to return.
    """

    def __init__(self, transform: Callable[[dict], dict], features=None, **kwargs):
        super().__init__(features=features)
        self.transform = transform

    def format_row(self, pa_table: pa.Table) -> dict:
        # A row is obtained by transforming the whole (single-row) batch, then unnesting it.
        formatted_batch = self.format_batch(pa_table)
        try:
            return _unnest(formatted_batch)
        except Exception as exc:
            raise TypeError(
                f"Custom formatting function must return a dict of sequences to be able to pick a row, but got {formatted_batch}"
            ) from exc

    def format_column(self, pa_table: pa.Table) -> ColumnFormat:
        formatted_batch = self.format_batch(pa_table)
        # The transform output must be a single-column mapping for a column query to be unambiguous.
        if hasattr(formatted_batch, "keys"):
            if len(formatted_batch.keys()) > 1:
                raise TypeError(
                    "Tried to query a column but the custom formatting function returns too many columns. "
                    f"Only one column was expected but got columns {list(formatted_batch.keys())}."
                )
        else:
            raise TypeError(
                f"Custom formatting function must return a dict to be able to pick a row, but got {formatted_batch}"
            )
        try:
            return formatted_batch[pa_table.column_names[0]]
        except Exception as exc:
            raise TypeError(
                f"Custom formatting function must return a dict to be able to pick a row, but got {formatted_batch}"
            ) from exc

    def format_batch(self, pa_table: pa.Table) -> dict:
        # Extract with the python extractor, decode features, then apply the user transform.
        batch = self.python_arrow_extractor().extract_batch(pa_table)
        batch = self.python_features_decoder.decode_batch(batch)
        return self.transform(batch)
def _check_valid_column_key(key: str, columns: List[str]) -> None:
|
| 519 |
+
if key not in columns:
|
| 520 |
+
raise KeyError(f"Column {key} not in the dataset. Current columns in the dataset: {columns}")
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
def _check_valid_index_key(key: Union[int, slice, range, Iterable], size: int) -> None:
|
| 524 |
+
if isinstance(key, int):
|
| 525 |
+
if (key < 0 and key + size < 0) or (key >= size):
|
| 526 |
+
raise IndexError(f"Invalid key: {key} is out of bounds for size {size}")
|
| 527 |
+
return
|
| 528 |
+
elif isinstance(key, slice):
|
| 529 |
+
pass
|
| 530 |
+
elif isinstance(key, range):
|
| 531 |
+
if len(key) > 0:
|
| 532 |
+
_check_valid_index_key(max(key), size=size)
|
| 533 |
+
_check_valid_index_key(min(key), size=size)
|
| 534 |
+
elif isinstance(key, Iterable):
|
| 535 |
+
if len(key) > 0:
|
| 536 |
+
_check_valid_index_key(int(max(key)), size=size)
|
| 537 |
+
_check_valid_index_key(int(min(key)), size=size)
|
| 538 |
+
else:
|
| 539 |
+
_raise_bad_key_type(key)
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
def key_to_query_type(key: Union[int, slice, range, str, Iterable]) -> str:
    """Map an indexing key to the kind of query it performs: "row", "column" or "batch"."""
    if isinstance(key, int):
        return "row"
    if isinstance(key, str):
        return "column"
    if isinstance(key, (slice, range, Iterable)):
        return "batch"
    _raise_bad_key_type(key)
def query_table(
    table: Table,
    key: Union[int, slice, range, str, Iterable],
    indices: Optional[Table] = None,
) -> pa.Table:
    """
    Query a Table to extract the subtable that correspond to the given key.

    Args:
        table (``datasets.table.Table``): The input Table to query from
        key (``Union[int, slice, range, str, Iterable]``): The key can be of different types:
            - an integer i: the subtable containing only the i-th row
            - a slice [i:j:k]: the subtable containing the rows that correspond to this slice
            - a range(i, j, k): the subtable containing the rows that correspond to this range
            - a string c: the subtable containing all the rows but only the column c
            - an iterable l: the subtable that is the concatenation of all the i-th rows for all i in the iterable
        indices (Optional ``datasets.table.Table``): If not None, it is used to re-map the given key to the table rows.
            The indices table must contain one column named "indices" of type uint64.
            This is used in case of shuffling or rows selection.


    Returns:
        ``pyarrow.Table``: the result of the query on the input table
    """
    # Check if key is valid
    if not isinstance(key, (int, slice, range, str, Iterable)):
        _raise_bad_key_type(key)
    if isinstance(key, str):
        _check_valid_column_key(key, table.column_names)
    else:
        # When an indices mapping exists, bounds are checked against it rather than the raw table.
        size = indices.num_rows if indices is not None else table.num_rows
        _check_valid_index_key(key, size)
    # Query the main table
    if indices is None:
        pa_subtable = _query_table(table, key)
    else:
        pa_subtable = _query_table_with_indices_mapping(table, key, indices=indices)
    return pa_subtable
def format_table(
    table: Table,
    key: Union[int, slice, range, str, Iterable],
    formatter: Formatter,
    format_columns: Optional[list] = None,
    output_all_columns=False,
):
    """
    Format a Table depending on the key that was used and a Formatter object.

    Args:
        table (``datasets.table.Table``): The input Table to format
        key (``Union[int, slice, range, str, Iterable]``): Depending on the key that was used, the formatter formats
            the table as either a row, a column or a batch.
        formatter (``datasets.formatting.formatting.Formatter``): Any subclass of a Formatter such as
            PythonFormatter, NumpyFormatter, etc.
        format_columns (:obj:`List[str]`, optional): if not None, it defines the columns that will be formatted using the
            given formatter. Other columns are discarded (unless ``output_all_columns`` is True)
        output_all_columns (:obj:`bool`, defaults to False). If True, the formatted output is completed using the columns
            that are not in the ``format_columns`` list. For these columns, the PythonFormatter is used.


    Returns:
        A row, column or batch formatted object defined by the Formatter:
        - the PythonFormatter returns a dictionary for a row or a batch, and a list for a column.
        - the NumpyFormatter returns a dictionary for a row or a batch, and a np.array for a column.
        - the PandasFormatter returns a pd.DataFrame for a row or a batch, and a pd.Series for a column.
        - the TorchFormatter returns a dictionary for a row or a batch, and a torch.Tensor for a column.
        - the TFFormatter returns a dictionary for a row or a batch, and a tf.Tensor for a column.
    """
    # Accept either a datasets Table wrapper or a raw pyarrow Table.
    if isinstance(table, Table):
        pa_table = table.table
    else:
        pa_table = table
    query_type = key_to_query_type(key)
    # Fallback formatter for columns outside of format_columns.
    python_formatter = PythonFormatter(features=None)
    if format_columns is None:
        # No column restriction: format everything with the given formatter.
        return formatter(pa_table, query_type=query_type)
    elif query_type == "column":
        if key in format_columns:
            return formatter(pa_table, query_type)
        else:
            # Columns outside of format_columns are returned as plain python objects.
            return python_formatter(pa_table, query_type=query_type)
    else:
        # Row/batch query: format only the requested columns with the given formatter.
        pa_table_to_format = pa_table.drop(col for col in pa_table.column_names if col not in format_columns)
        formatted_output = formatter(pa_table_to_format, query_type=query_type)
        if output_all_columns:
            if isinstance(formatted_output, MutableMapping):
                # Complete the output with the remaining columns, formatted as plain python.
                pa_table_with_remaining_columns = pa_table.drop(
                    col for col in pa_table.column_names if col in format_columns
                )
                remaining_columns_dict = python_formatter(pa_table_with_remaining_columns, query_type=query_type)
                formatted_output.update(remaining_columns_dict)
            else:
                raise TypeError(
                    f"Custom formatting function must return a dict to work with output_all_columns=True, but got {formatted_output}"
                )
        return formatted_output
lib/python3.10/site-packages/datasets/formatting/jax_formatter.py
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 The HuggingFace Authors.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
# Lint as: python3
|
| 16 |
+
import sys
|
| 17 |
+
from collections.abc import Mapping
|
| 18 |
+
from typing import TYPE_CHECKING, Dict, Optional
|
| 19 |
+
|
| 20 |
+
import numpy as np
|
| 21 |
+
import pyarrow as pa
|
| 22 |
+
|
| 23 |
+
from .. import config
|
| 24 |
+
from ..utils.logging import get_logger
|
| 25 |
+
from ..utils.py_utils import map_nested
|
| 26 |
+
from .formatting import TensorFormatter
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
if TYPE_CHECKING:
|
| 30 |
+
import jax
|
| 31 |
+
import jaxlib
|
| 32 |
+
|
| 33 |
+
logger = get_logger()
|
| 34 |
+
|
| 35 |
+
DEVICE_MAPPING: Optional[dict] = None
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class JaxFormatter(TensorFormatter[Mapping, "jax.Array", Mapping]):
|
| 39 |
+
def __init__(self, features=None, device=None, **jnp_array_kwargs):
|
| 40 |
+
super().__init__(features=features)
|
| 41 |
+
import jax
|
| 42 |
+
from jaxlib.xla_client import Device
|
| 43 |
+
|
| 44 |
+
if isinstance(device, Device):
|
| 45 |
+
raise ValueError(
|
| 46 |
+
f"Expected {device} to be a `str` not {type(device)}, as `jaxlib.xla_extension.Device` "
|
| 47 |
+
"is not serializable neither with `pickle` nor with `dill`. Instead you can surround "
|
| 48 |
+
"the device with `str()` to get its string identifier that will be internally mapped "
|
| 49 |
+
"to the actual `jaxlib.xla_extension.Device`."
|
| 50 |
+
)
|
| 51 |
+
self.device = device if isinstance(device, str) else str(jax.devices()[0])
|
| 52 |
+
# using global variable since `jaxlib.xla_extension.Device` is not serializable neither
|
| 53 |
+
# with `pickle` nor with `dill`, so we need to use a global variable instead
|
| 54 |
+
global DEVICE_MAPPING
|
| 55 |
+
if DEVICE_MAPPING is None:
|
| 56 |
+
DEVICE_MAPPING = self._map_devices_to_str()
|
| 57 |
+
if self.device not in list(DEVICE_MAPPING.keys()):
|
| 58 |
+
logger.warning(
|
| 59 |
+
f"Device with string identifier {self.device} not listed among the available "
|
| 60 |
+
f"devices: {list(DEVICE_MAPPING.keys())}, so falling back to the default "
|
| 61 |
+
f"device: {str(jax.devices()[0])}."
|
| 62 |
+
)
|
| 63 |
+
self.device = str(jax.devices()[0])
|
| 64 |
+
self.jnp_array_kwargs = jnp_array_kwargs
|
| 65 |
+
|
| 66 |
+
@staticmethod
|
| 67 |
+
def _map_devices_to_str() -> Dict[str, "jaxlib.xla_extension.Device"]:
|
| 68 |
+
import jax
|
| 69 |
+
|
| 70 |
+
return {str(device): device for device in jax.devices()}
|
| 71 |
+
|
| 72 |
+
def _consolidate(self, column):
|
| 73 |
+
import jax
|
| 74 |
+
import jax.numpy as jnp
|
| 75 |
+
|
| 76 |
+
if isinstance(column, list) and column:
|
| 77 |
+
if all(
|
| 78 |
+
isinstance(x, jax.Array) and x.shape == column[0].shape and x.dtype == column[0].dtype for x in column
|
| 79 |
+
):
|
| 80 |
+
return jnp.stack(column, axis=0)
|
| 81 |
+
return column
|
| 82 |
+
|
| 83 |
+
def _tensorize(self, value):
|
| 84 |
+
import jax
|
| 85 |
+
import jax.numpy as jnp
|
| 86 |
+
|
| 87 |
+
if isinstance(value, (str, bytes, type(None))):
|
| 88 |
+
return value
|
| 89 |
+
elif isinstance(value, (np.character, np.ndarray)) and np.issubdtype(value.dtype, np.character):
|
| 90 |
+
return value.tolist()
|
| 91 |
+
|
| 92 |
+
default_dtype = {}
|
| 93 |
+
|
| 94 |
+
if isinstance(value, (np.number, np.ndarray)) and np.issubdtype(value.dtype, np.integer):
|
| 95 |
+
# the default int precision depends on the jax config
|
| 96 |
+
# see https://jax.readthedocs.io/en/latest/notebooks/Common_Gotchas_in_JAX.html#double-64bit-precision
|
| 97 |
+
if jax.config.jax_enable_x64:
|
| 98 |
+
default_dtype = {"dtype": jnp.int64}
|
| 99 |
+
else:
|
| 100 |
+
default_dtype = {"dtype": jnp.int32}
|
| 101 |
+
elif isinstance(value, (np.number, np.ndarray)) and np.issubdtype(value.dtype, np.floating):
|
| 102 |
+
default_dtype = {"dtype": jnp.float32}
|
| 103 |
+
elif config.PIL_AVAILABLE and "PIL" in sys.modules:
|
| 104 |
+
import PIL.Image
|
| 105 |
+
|
| 106 |
+
if isinstance(value, PIL.Image.Image):
|
| 107 |
+
value = np.asarray(value)
|
| 108 |
+
|
| 109 |
+
# using global variable since `jaxlib.xla_extension.Device` is not serializable neither
|
| 110 |
+
# with `pickle` nor with `dill`, so we need to use a global variable instead
|
| 111 |
+
global DEVICE_MAPPING
|
| 112 |
+
if DEVICE_MAPPING is None:
|
| 113 |
+
DEVICE_MAPPING = self._map_devices_to_str()
|
| 114 |
+
|
| 115 |
+
with jax.default_device(DEVICE_MAPPING[self.device]):
|
| 116 |
+
# calling jnp.array on a np.ndarray does copy the data
|
| 117 |
+
# see https://github.com/google/jax/issues/4486
|
| 118 |
+
return jnp.array(value, **{**default_dtype, **self.jnp_array_kwargs})
|
| 119 |
+
|
| 120 |
+
def _recursive_tensorize(self, data_struct):
|
| 121 |
+
import jax
|
| 122 |
+
|
| 123 |
+
# support for torch, tf, jax etc.
|
| 124 |
+
if config.TORCH_AVAILABLE and "torch" in sys.modules:
|
| 125 |
+
import torch
|
| 126 |
+
|
| 127 |
+
if isinstance(data_struct, torch.Tensor):
|
| 128 |
+
return self._tensorize(data_struct.detach().cpu().numpy()[()])
|
| 129 |
+
if hasattr(data_struct, "__array__") and not isinstance(data_struct, jax.Array):
|
| 130 |
+
data_struct = data_struct.__array__()
|
| 131 |
+
# support for nested types like struct of list of struct
|
| 132 |
+
if isinstance(data_struct, np.ndarray):
|
| 133 |
+
if data_struct.dtype == object: # jax arrays cannot be instantied from an array of objects
|
| 134 |
+
return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
|
| 135 |
+
elif isinstance(data_struct, (list, tuple)):
|
| 136 |
+
return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
|
| 137 |
+
return self._tensorize(data_struct)
|
| 138 |
+
|
| 139 |
+
    def recursive_tensorize(self, data_struct: dict):
        """Apply `_recursive_tensorize` over a nested dict structure.

        `map_list=False` so lists reach `_recursive_tensorize` whole and can
        be consolidated into a single array.
        """
        return map_nested(self._recursive_tensorize, data_struct, map_list=False)
|
| 141 |
+
|
| 142 |
+
def format_row(self, pa_table: pa.Table) -> Mapping:
|
| 143 |
+
row = self.numpy_arrow_extractor().extract_row(pa_table)
|
| 144 |
+
row = self.python_features_decoder.decode_row(row)
|
| 145 |
+
return self.recursive_tensorize(row)
|
| 146 |
+
|
| 147 |
+
def format_column(self, pa_table: pa.Table) -> "jax.Array":
|
| 148 |
+
column = self.numpy_arrow_extractor().extract_column(pa_table)
|
| 149 |
+
column = self.python_features_decoder.decode_column(column, pa_table.column_names[0])
|
| 150 |
+
column = self.recursive_tensorize(column)
|
| 151 |
+
column = self._consolidate(column)
|
| 152 |
+
return column
|
| 153 |
+
|
| 154 |
+
def format_batch(self, pa_table: pa.Table) -> Mapping:
|
| 155 |
+
batch = self.numpy_arrow_extractor().extract_batch(pa_table)
|
| 156 |
+
batch = self.python_features_decoder.decode_batch(batch)
|
| 157 |
+
batch = self.recursive_tensorize(batch)
|
| 158 |
+
for column_name in batch:
|
| 159 |
+
batch[column_name] = self._consolidate(batch[column_name])
|
| 160 |
+
return batch
|
lib/python3.10/site-packages/datasets/formatting/np_formatter.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Authors.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
import sys
|
| 16 |
+
from collections.abc import Mapping
|
| 17 |
+
|
| 18 |
+
import numpy as np
|
| 19 |
+
import pyarrow as pa
|
| 20 |
+
|
| 21 |
+
from .. import config
|
| 22 |
+
from ..utils.py_utils import map_nested
|
| 23 |
+
from .formatting import TensorFormatter
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class NumpyFormatter(TensorFormatter[Mapping, np.ndarray, Mapping]):
    """Formatter that presents Arrow data as numpy scalars/arrays."""

    def __init__(self, features=None, **np_array_kwargs):
        # np_array_kwargs are forwarded to np.asarray in _tensorize (e.g. dtype)
        super().__init__(features=features)
        self.np_array_kwargs = np_array_kwargs

    def _consolidate(self, column):
        """Merge a list of values into one ndarray.

        Arrays with a common shape and dtype are stacked; anything else is
        wrapped in a 1-D object-dtype array so ragged data survives.
        """
        if isinstance(column, list):
            if column and all(
                isinstance(x, np.ndarray) and x.shape == column[0].shape and x.dtype == column[0].dtype for x in column
            ):
                return np.stack(column)
            else:
                # don't use np.array(column, dtype=object)
                # since it fails in certain cases
                # see https://stackoverflow.com/q/51005699
                out = np.empty(len(column), dtype=object)
                out[:] = column
                return out
        return column

    def _tensorize(self, value):
        """Convert one extracted value to numpy, or pass it through unchanged."""
        if isinstance(value, (str, bytes, type(None))):
            return value
        elif isinstance(value, (np.character, np.ndarray)) and np.issubdtype(value.dtype, np.character):
            # numpy string scalars/arrays are already in their final form
            return value
        elif isinstance(value, np.number):
            return value

        default_dtype = {}

        if isinstance(value, np.ndarray) and np.issubdtype(value.dtype, np.integer):
            default_dtype = {"dtype": np.int64}
        elif isinstance(value, np.ndarray) and np.issubdtype(value.dtype, np.floating):
            default_dtype = {"dtype": np.float32}
        elif config.PIL_AVAILABLE and "PIL" in sys.modules:
            import PIL.Image

            if isinstance(value, PIL.Image.Image):
                return np.asarray(value, **self.np_array_kwargs)

        # user-supplied np_array_kwargs override the inferred default dtype
        return np.asarray(value, **{**default_dtype, **self.np_array_kwargs})

    def _recursive_tensorize(self, data_struct):
        """Recursively convert a nested structure to numpy."""
        # support for torch, tf, jax etc.
        if config.TORCH_AVAILABLE and "torch" in sys.modules:
            import torch

            if isinstance(data_struct, torch.Tensor):
                return self._tensorize(data_struct.detach().cpu().numpy()[()])
        if hasattr(data_struct, "__array__") and not isinstance(data_struct, (np.ndarray, np.character, np.number)):
            data_struct = data_struct.__array__()
        # support for nested types like struct of list of struct
        if isinstance(data_struct, np.ndarray):
            if data_struct.dtype == object:
                return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
        if isinstance(data_struct, (list, tuple)):
            return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
        return self._tensorize(data_struct)

    def recursive_tensorize(self, data_struct: dict):
        """Map `_recursive_tensorize` over a nested dict (lists handled inside)."""
        return map_nested(self._recursive_tensorize, data_struct, map_list=False)

    def format_row(self, pa_table: pa.Table) -> Mapping:
        """Return one decoded row converted to numpy."""
        row = self.numpy_arrow_extractor().extract_row(pa_table)
        row = self.python_features_decoder.decode_row(row)
        return self.recursive_tensorize(row)

    def format_column(self, pa_table: pa.Table) -> np.ndarray:
        """Return the first column as a consolidated numpy array."""
        column = self.numpy_arrow_extractor().extract_column(pa_table)
        column = self.python_features_decoder.decode_column(column, pa_table.column_names[0])
        column = self.recursive_tensorize(column)
        column = self._consolidate(column)
        return column

    def format_batch(self, pa_table: pa.Table) -> Mapping:
        """Return a decoded batch with each column consolidated."""
        batch = self.numpy_arrow_extractor().extract_batch(pa_table)
        batch = self.python_features_decoder.decode_batch(batch)
        batch = self.recursive_tensorize(batch)
        for column_name in batch:
            batch[column_name] = self._consolidate(batch[column_name])
        return batch
|
lib/python3.10/site-packages/datasets/formatting/tf_formatter.py
ADDED
|
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Authors.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
# Lint as: python3
|
| 16 |
+
import sys
|
| 17 |
+
from collections.abc import Mapping
|
| 18 |
+
from typing import TYPE_CHECKING
|
| 19 |
+
|
| 20 |
+
import numpy as np
|
| 21 |
+
import pyarrow as pa
|
| 22 |
+
|
| 23 |
+
from .. import config
|
| 24 |
+
from ..utils.py_utils import map_nested
|
| 25 |
+
from .formatting import TensorFormatter
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
if TYPE_CHECKING:
|
| 29 |
+
import tensorflow as tf
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class TFFormatter(TensorFormatter[Mapping, "tf.Tensor", Mapping]):
    """Formatter that presents Arrow data as tensorflow tensors."""

    def __init__(self, features=None, **tf_tensor_kwargs):
        super().__init__(features=features)
        # tf_tensor_kwargs are forwarded to tf.convert_to_tensor in _tensorize
        self.tf_tensor_kwargs = tf_tensor_kwargs
        import tensorflow as tf  # noqa: F401 - import tf at initialization

    def _consolidate(self, column):
        """Stack a list of tensors; fall back to a ragged stack for 1-D ragged data."""
        import tensorflow as tf

        if isinstance(column, list) and column:
            if all(
                isinstance(x, tf.Tensor) and x.shape == column[0].shape and x.dtype == column[0].dtype for x in column
            ):
                return tf.stack(column)
            elif all(
                isinstance(x, (tf.Tensor, tf.RaggedTensor)) and x.ndim == 1 and x.dtype == column[0].dtype
                for x in column
            ):
                # only rag 1-D tensors, otherwise some dimensions become ragged even though they were consolidated
                return tf.ragged.stack(column)

        return column

    def _tensorize(self, value):
        """Convert one extracted value to a tf.Tensor (None passes through)."""
        import tensorflow as tf

        if value is None:
            return value

        default_dtype = {}

        if isinstance(value, (np.number, np.ndarray)) and np.issubdtype(value.dtype, np.integer):
            default_dtype = {"dtype": tf.int64}
        elif isinstance(value, (np.number, np.ndarray)) and np.issubdtype(value.dtype, np.floating):
            default_dtype = {"dtype": tf.float32}
        elif config.PIL_AVAILABLE and "PIL" in sys.modules:
            import PIL.Image

            if isinstance(value, PIL.Image.Image):
                value = np.asarray(value)

        # user-supplied tf_tensor_kwargs override the inferred default dtype
        return tf.convert_to_tensor(value, **{**default_dtype, **self.tf_tensor_kwargs})

    def _recursive_tensorize(self, data_struct):
        """Recursively convert a nested structure to tf tensors."""
        import tensorflow as tf

        # support for torch, tf, jax etc.
        if config.TORCH_AVAILABLE and "torch" in sys.modules:
            import torch

            if isinstance(data_struct, torch.Tensor):
                return self._tensorize(data_struct.detach().cpu().numpy()[()])
        if hasattr(data_struct, "__array__") and not isinstance(data_struct, tf.Tensor):
            data_struct = data_struct.__array__()
        # support for nested types like struct of list of struct
        if isinstance(data_struct, np.ndarray):
            if data_struct.dtype == object:  # tf tensors cannot be instantiated from an array of objects
                return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
        elif isinstance(data_struct, (list, tuple)):
            return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
        return self._tensorize(data_struct)

    def recursive_tensorize(self, data_struct: dict):
        """Map `_recursive_tensorize` over a nested dict (lists handled inside)."""
        return map_nested(self._recursive_tensorize, data_struct, map_list=False)

    def format_row(self, pa_table: pa.Table) -> Mapping:
        """Return one decoded row as tf tensors."""
        row = self.numpy_arrow_extractor().extract_row(pa_table)
        row = self.python_features_decoder.decode_row(row)
        return self.recursive_tensorize(row)

    def format_column(self, pa_table: pa.Table) -> "tf.Tensor":
        """Return the first column as a consolidated tensor."""
        column = self.numpy_arrow_extractor().extract_column(pa_table)
        column = self.python_features_decoder.decode_column(column, pa_table.column_names[0])
        column = self.recursive_tensorize(column)
        column = self._consolidate(column)
        return column

    def format_batch(self, pa_table: pa.Table) -> Mapping:
        """Return a decoded batch with each column consolidated."""
        batch = self.numpy_arrow_extractor().extract_batch(pa_table)
        batch = self.python_features_decoder.decode_batch(batch)
        batch = self.recursive_tensorize(batch)
        for column_name in batch:
            batch[column_name] = self._consolidate(batch[column_name])
        return batch
|
lib/python3.10/site-packages/datasets/formatting/torch_formatter.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Authors.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
# Lint as: python3
|
| 16 |
+
import sys
|
| 17 |
+
from collections.abc import Mapping
|
| 18 |
+
from typing import TYPE_CHECKING
|
| 19 |
+
|
| 20 |
+
import numpy as np
|
| 21 |
+
import pyarrow as pa
|
| 22 |
+
|
| 23 |
+
from .. import config
|
| 24 |
+
from ..utils.py_utils import map_nested
|
| 25 |
+
from .formatting import TensorFormatter
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
if TYPE_CHECKING:
|
| 29 |
+
import torch
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class TorchFormatter(TensorFormatter[Mapping, "torch.Tensor", Mapping]):
    """Formatter that presents Arrow data as torch tensors."""

    def __init__(self, features=None, **torch_tensor_kwargs):
        super().__init__(features=features)
        # torch_tensor_kwargs are forwarded to torch.tensor in _tensorize
        self.torch_tensor_kwargs = torch_tensor_kwargs
        import torch  # noqa import torch at initialization

    def _consolidate(self, column):
        """Stack a list of same-shape/same-dtype tensors into one tensor."""
        import torch

        if isinstance(column, list) and column:
            if all(
                isinstance(x, torch.Tensor) and x.shape == column[0].shape and x.dtype == column[0].dtype
                for x in column
            ):
                return torch.stack(column)
        return column

    def _tensorize(self, value):
        """Convert one extracted value to a torch tensor (strings pass through)."""
        import torch

        if isinstance(value, (str, bytes, type(None))):
            return value
        elif isinstance(value, (np.character, np.ndarray)) and np.issubdtype(value.dtype, np.character):
            # torch has no string tensors; hand back python lists
            return value.tolist()

        default_dtype = {}

        if isinstance(value, (np.number, np.ndarray)) and np.issubdtype(value.dtype, np.integer):
            default_dtype = {"dtype": torch.int64}
        elif isinstance(value, (np.number, np.ndarray)) and np.issubdtype(value.dtype, np.floating):
            default_dtype = {"dtype": torch.float32}
        elif config.PIL_AVAILABLE and "PIL" in sys.modules:
            import PIL.Image

            if isinstance(value, PIL.Image.Image):
                value = np.asarray(value)
        # user-supplied torch_tensor_kwargs override the inferred default dtype
        return torch.tensor(value, **{**default_dtype, **self.torch_tensor_kwargs})

    def _recursive_tensorize(self, data_struct):
        """Recursively convert a nested structure to torch tensors."""
        import torch

        # support for torch, tf, jax etc.
        if hasattr(data_struct, "__array__") and not isinstance(data_struct, torch.Tensor):
            data_struct = data_struct.__array__()
        # support for nested types like struct of list of struct
        if isinstance(data_struct, np.ndarray):
            if data_struct.dtype == object:  # torch tensors cannot be instantiated from an array of objects
                return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
        elif isinstance(data_struct, (list, tuple)):
            return self._consolidate([self.recursive_tensorize(substruct) for substruct in data_struct])
        return self._tensorize(data_struct)

    def recursive_tensorize(self, data_struct: dict):
        """Map `_recursive_tensorize` over a nested dict (lists handled inside)."""
        return map_nested(self._recursive_tensorize, data_struct, map_list=False)

    def format_row(self, pa_table: pa.Table) -> Mapping:
        """Return one decoded row as torch tensors."""
        row = self.numpy_arrow_extractor().extract_row(pa_table)
        row = self.python_features_decoder.decode_row(row)
        return self.recursive_tensorize(row)

    def format_column(self, pa_table: pa.Table) -> "torch.Tensor":
        """Return the first column as a consolidated tensor."""
        column = self.numpy_arrow_extractor().extract_column(pa_table)
        column = self.python_features_decoder.decode_column(column, pa_table.column_names[0])
        column = self.recursive_tensorize(column)
        column = self._consolidate(column)
        return column

    def format_batch(self, pa_table: pa.Table) -> Mapping:
        """Return a decoded batch with each column consolidated."""
        batch = self.numpy_arrow_extractor().extract_batch(pa_table)
        batch = self.python_features_decoder.decode_batch(batch)
        batch = self.recursive_tensorize(batch)
        for column_name in batch:
            batch[column_name] = self._consolidate(batch[column_name])
        return batch
|
lib/python3.10/site-packages/datasets/utils/resources/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/datasets/utils/resources/creators.json
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"language": [
|
| 3 |
+
"found",
|
| 4 |
+
"crowdsourced",
|
| 5 |
+
"expert-generated",
|
| 6 |
+
"machine-generated",
|
| 7 |
+
"other"
|
| 8 |
+
],
|
| 9 |
+
"annotations": [
|
| 10 |
+
"found",
|
| 11 |
+
"crowdsourced",
|
| 12 |
+
"expert-generated",
|
| 13 |
+
"machine-generated",
|
| 14 |
+
"no-annotation",
|
| 15 |
+
"other"
|
| 16 |
+
]
|
| 17 |
+
}
|
lib/python3.10/site-packages/datasets/utils/resources/languages.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
lib/python3.10/site-packages/datasets/utils/resources/size_categories.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
"unknown",
|
| 3 |
+
"n<1K",
|
| 4 |
+
"1K<n<10K",
|
| 5 |
+
"10K<n<100K",
|
| 6 |
+
"100K<n<1M",
|
| 7 |
+
"1M<n<10M",
|
| 8 |
+
"10M<n<100M",
|
| 9 |
+
"100M<n<1B",
|
| 10 |
+
"1B<n<10B",
|
| 11 |
+
"10B<n<100B",
|
| 12 |
+
"100B<n<1T",
|
| 13 |
+
"n>1T"
|
| 14 |
+
]
|
lib/python3.10/site-packages/importlib_resources/compat/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/importlib_resources/compat/py39.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys

# Public alias: a zip-archive-backed Path implementation.
__all__ = ['ZipPath']


if sys.version_info >= (3, 10):
    # stdlib zipfile.Path is sufficient on Python 3.10+
    from zipfile import Path as ZipPath
else:
    # fall back to the third-party `zipp` backport on older Pythons
    from zipp import Path as ZipPath
|
lib/python3.10/site-packages/importlib_resources/future/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/importlib_resources/future/adapters.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import pathlib
|
| 3 |
+
from contextlib import suppress
|
| 4 |
+
from types import SimpleNamespace
|
| 5 |
+
|
| 6 |
+
from .. import _adapters, readers
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def _block_standard(reader_getter):
    """
    Wrap _adapters.TraversableResourcesLoader.get_resource_reader
    and intercept any standard library readers.
    """

    @functools.wraps(reader_getter)
    def wrapper(*args, **kwargs):
        """
        If the reader is from the standard library, return None to allow
        likely newer implementations in this library to take precedence.
        """
        try:
            reader = reader_getter(*args, **kwargs)
        except NotADirectoryError:
            # MultiplexedPath may fail on zip subdirectory
            return
        except ValueError as exc:
            # NamespaceReader in stdlib may fail for editable installs
            # (python/importlib_resources#311, python/importlib_resources#318)
            # Remove after bugfix applied to Python 3.13.
            if "not enough values to unpack" not in str(exc):
                raise
            return
        # Python 3.10+: stdlib readers live in importlib.readers / importlib.resources.readers
        mod_name = reader.__class__.__module__
        if mod_name.startswith('importlib.') and mod_name.endswith('readers'):
            return
        # Python 3.8, 3.9: stdlib loaders surface through CompatibilityFiles
        if isinstance(reader, _adapters.CompatibilityFiles) and (
            reader.spec.loader.__class__.__module__.startswith('zipimport')
            or reader.spec.loader.__class__.__module__.startswith(
                '_frozen_importlib_external'
            )
        ):
            return
        return reader

    return wrapper
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _skip_degenerate(reader):
    """
    Mask any degenerate reader. Ref #298.
    """
    if isinstance(reader, _adapters.CompatibilityFiles) and not reader._reader:
        return None
    return reader
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class TraversableResourcesLoader(_adapters.TraversableResourcesLoader):
    """
    Adapt loaders to provide TraversableResources and other
    compatibility.

    Ensures the readers from importlib_resources are preferred
    over stdlib readers.
    """

    def get_resource_reader(self, name):
        # Preference order: a usable non-stdlib reader, then a reader built
        # locally from the spec, and finally whatever the parent adapter gives.
        return (
            _skip_degenerate(_block_standard(super().get_resource_reader)(name))
            or self._standard_reader()
            or super().get_resource_reader(name)
        )

    def _standard_reader(self):
        # first constructor that succeeds wins
        return self._zip_reader() or self._namespace_reader() or self._file_reader()

    def _zip_reader(self):
        # AttributeError: loader lacks zip attributes -> not a zip import
        with suppress(AttributeError):
            return readers.ZipReader(self.spec.loader, self.spec.name)

    def _namespace_reader(self):
        # AttributeError/ValueError: spec has no usable search locations
        with suppress(AttributeError, ValueError):
            return readers.NamespaceReader(self.spec.submodule_search_locations)

    def _file_reader(self):
        try:
            path = pathlib.Path(self.spec.origin)
        except TypeError:
            # spec.origin may be None (e.g. built-in or namespace modules)
            return None
        if path.exists():
            return readers.FileReader(SimpleNamespace(path=path))
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def wrap_spec(package):
    """
    Override _adapters.wrap_spec to use TraversableResourcesLoader
    from above. Ensures that future behavior is always available on older
    Pythons.
    """
    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
|
lib/python3.10/site-packages/importlib_resources/tests/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/importlib_resources/tests/_path.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import pathlib
|
| 3 |
+
from typing import Dict, Protocol, Union, runtime_checkable
|
| 4 |
+
|
| 5 |
+
####
|
| 6 |
+
# from jaraco.path 3.7.1
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class Symlink(str):
    """
    A string indicating the target of a symlink.

    Marker type for ``FilesSpec`` values so that ``create`` can dispatch
    symlink creation via ``functools.singledispatch``.
    """
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
# Recursive spec: maps names to file content (str/bytes), a Symlink target,
# or a nested directory spec.
FilesSpec = Dict[str, Union[str, bytes, Symlink, 'FilesSpec']]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@runtime_checkable
class TreeMaker(Protocol):
    """Structural interface required of ``prefix`` objects passed to ``build``.

    ``pathlib.Path`` satisfies it; so can path-like test doubles.
    """

    def __truediv__(self, *args, **kwargs): ...  # pragma: no cover

    def mkdir(self, **kwargs): ...  # pragma: no cover

    def write_text(self, content, **kwargs): ...  # pragma: no cover

    def write_bytes(self, content): ...  # pragma: no cover

    def symlink_to(self, target): ...  # pragma: no cover
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker:
    """Coerce a plain string into a ``pathlib.Path``; pass TreeMakers through."""
    if isinstance(obj, TreeMaker):
        return obj
    return pathlib.Path(obj)  # type: ignore[return-value]
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def build(
    spec: FilesSpec,
    prefix: Union[str, TreeMaker] = pathlib.Path(),  # type: ignore[assignment]
):
    """
    Build a set of files/directories, as described by the spec.

    Each key represents a pathname, and the value represents
    the content. Content may be a nested directory.

    >>> spec = {
    ...     'README.txt': "A README file",
    ...     "foo": {
    ...         "__init__.py": "",
    ...         "bar": {
    ...             "__init__.py": "",
    ...         },
    ...         "baz.py": "# Some code",
    ...         "bar.py": Symlink("baz.py"),
    ...     },
    ...     "bing": Symlink("foo"),
    ... }
    >>> target = getfixture('tmp_path')
    >>> build(spec, target)
    >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
    '# Some code'
    >>> target.joinpath('bing/bar.py').read_text(encoding='utf-8')
    '# Some code'
    """
    # delegate each entry to the singledispatch `create` below
    for name, contents in spec.items():
        create(contents, _ensure_tree_maker(prefix) / name)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
@functools.singledispatch
def create(content: Union[str, bytes, FilesSpec], path):
    """Default case: *content* is a nested directory spec."""
    path.mkdir(exist_ok=True)
    build(content, prefix=path)  # type: ignore[arg-type]


@create.register
def _(content: bytes, path):
    """Binary file content."""
    path.write_bytes(content)


@create.register
def _(content: str, path):
    """Text file content, written as UTF-8."""
    path.write_text(content, encoding='utf-8')


@create.register
def _(content: Symlink, path):
    """Symlink: *content* is the link target (dispatch beats the str case)."""
    path.symlink_to(content)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
# end from jaraco.path
|
| 90 |
+
####
|
lib/python3.10/site-packages/importlib_resources/tests/compat/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/importlib_resources/tests/compat/py312.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
|
| 3 |
+
from .py39 import import_helper
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@contextlib.contextmanager
def isolated_modules():
    """
    Save modules on entry and cleanup on exit.
    """
    (saved,) = import_helper.modules_setup()
    try:
        yield
    finally:
        # restore sys.modules even if the body raised
        import_helper.modules_cleanup(saved)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# Register the shim only when the running CPython's import_helper
# doesn't already provide isolated_modules.
vars(import_helper).setdefault('isolated_modules', isolated_modules)
|
lib/python3.10/site-packages/importlib_resources/tests/compat/py39.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backward-compatability shims to support Python 3.9 and earlier.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from jaraco.test.cpython import from_test_support, try_import
|
| 6 |
+
|
| 7 |
+
import_helper = try_import('import_helper') or from_test_support(
|
| 8 |
+
'modules_setup', 'modules_cleanup', 'DirsOnSysPath'
|
| 9 |
+
)
|
| 10 |
+
os_helper = try_import('os_helper') or from_test_support('temp_dir')
|
| 11 |
+
warnings_helper = try_import('warnings_helper') or from_test_support(
|
| 12 |
+
'ignore_warnings', 'check_warnings'
|
| 13 |
+
)
|
lib/python3.10/site-packages/importlib_resources/tests/test_compatibilty_files.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import unittest
|
| 3 |
+
|
| 4 |
+
import importlib_resources as resources
|
| 5 |
+
from importlib_resources._adapters import (
|
| 6 |
+
CompatibilityFiles,
|
| 7 |
+
wrap_spec,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
from . import util
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class CompatibilityFilesTests(unittest.TestCase):
    """
    Exercise the CompatibilityFiles adapter over a synthetic package
    whose reader exposes an in-memory file, a path, and three children.
    """

    @property
    def package(self):
        # Fresh package per access: a BytesIO resource plus listed contents.
        bytes_data = io.BytesIO(b'Hello, world!')
        return util.create_package(
            file=bytes_data,
            path='some_path',
            contents=('a', 'b', 'c'),
        )

    @property
    def files(self):
        # The files() entry point for the synthetic package.
        return resources.files(self.package)

    def test_spec_path_iter(self):
        self.assertEqual(
            sorted(path.name for path in self.files.iterdir()),
            ['a', 'b', 'c'],
        )

    def test_child_path_iter(self):
        # Children of a child are empty (no nested contents declared).
        self.assertEqual(list((self.files / 'a').iterdir()), [])

    def test_orphan_path_iter(self):
        # Paths two or more levels deep are orphans and iterate empty.
        self.assertEqual(list((self.files / 'a' / 'a').iterdir()), [])
        self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), [])

    def test_spec_path_is(self):
        # The root is neither file nor directory.
        self.assertFalse(self.files.is_file())
        self.assertFalse(self.files.is_dir())

    def test_child_path_is(self):
        self.assertTrue((self.files / 'a').is_file())
        self.assertFalse((self.files / 'a').is_dir())

    def test_orphan_path_is(self):
        # Orphans report neither file nor directory at any depth.
        self.assertFalse((self.files / 'a' / 'a').is_file())
        self.assertFalse((self.files / 'a' / 'a').is_dir())
        self.assertFalse((self.files / 'a' / 'a' / 'a').is_file())
        self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir())

    def test_spec_path_name(self):
        # util.create_package names the synthetic package 'testingpackage'.
        self.assertEqual(self.files.name, 'testingpackage')

    def test_child_path_name(self):
        self.assertEqual((self.files / 'a').name, 'a')

    def test_orphan_path_name(self):
        self.assertEqual((self.files / 'a' / 'b').name, 'b')
        self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c')

    def test_spec_path_open(self):
        # Reading the root yields the reader's in-memory file content.
        self.assertEqual(self.files.read_bytes(), b'Hello, world!')
        self.assertEqual(self.files.read_text(encoding='utf-8'), 'Hello, world!')

    def test_child_path_open(self):
        self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
        self.assertEqual(
            (self.files / 'a').read_text(encoding='utf-8'), 'Hello, world!'
        )

    def test_orphan_path_open(self):
        # Orphans have no backing file and must fail to open.
        with self.assertRaises(FileNotFoundError):
            (self.files / 'a' / 'b').read_bytes()
        with self.assertRaises(FileNotFoundError):
            (self.files / 'a' / 'b' / 'c').read_bytes()

    def test_open_invalid_mode(self):
        with self.assertRaises(ValueError):
            self.files.open('0')

    def test_orphan_path_invalid(self):
        # OrphanPath requires at least one path segment.
        with self.assertRaises(ValueError):
            CompatibilityFiles.OrphanPath()

    def test_wrap_spec(self):
        spec = wrap_spec(self.package)
        self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class CompatibilityFilesNoReaderTests(unittest.TestCase):
    """Behavior when the package's loader supplies no resource reader."""

    @property
    def package(self):
        # A package built around a None loader has no reader to delegate to.
        return util.create_package_from_loader(None)

    @property
    def files(self):
        return resources.files(self.package)

    def test_spec_path_joinpath(self):
        # Without a reader, traversal can only produce orphan paths.
        child = self.files / 'a'
        self.assertIsInstance(child, CompatibilityFiles.OrphanPath)
|
lib/python3.10/site-packages/importlib_resources/tests/test_custom.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import pathlib
|
| 3 |
+
import unittest
|
| 4 |
+
|
| 5 |
+
import importlib_resources as resources
|
| 6 |
+
|
| 7 |
+
from .. import abc
|
| 8 |
+
from ..abc import ResourceReader, TraversableResources
|
| 9 |
+
from . import util
|
| 10 |
+
from .compat.py39 import os_helper
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class SimpleLoader:
    """Minimal loader that exposes nothing but a resource reader."""

    def __init__(self, reader: ResourceReader):
        self.reader = reader

    def get_resource_reader(self, package):
        # The same reader is handed out regardless of the package asked for.
        return self.reader
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class MagicResources(TraversableResources):
    """TraversableResources whose files() is simply a pre-supplied path."""

    def __init__(self, path: pathlib.Path):
        self.path = path

    def files(self):
        # No magic beyond returning the stored traversable root.
        return self.path
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class CustomTraversableResourcesTests(unittest.TestCase):
    def setUp(self):
        # ExitStack gathers fixture cleanups; close it when the test ends.
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)

    def test_custom_loader(self):
        """files() on a package with a custom reader yields its Traversable."""
        root = pathlib.Path(self.fixtures.enter_context(os_helper.temp_dir()))
        custom = SimpleLoader(MagicResources(root))
        pkg = util.create_package_from_loader(custom)
        traversable = resources.files(pkg)
        assert isinstance(traversable, abc.Traversable)
        assert list(traversable.iterdir()) == []
|
lib/python3.10/site-packages/importlib_resources/tests/test_files.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import importlib
|
| 3 |
+
import pathlib
|
| 4 |
+
import py_compile
|
| 5 |
+
import textwrap
|
| 6 |
+
import unittest
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
import importlib_resources as resources
|
| 10 |
+
|
| 11 |
+
from ..abc import Traversable
|
| 12 |
+
from . import util
|
| 13 |
+
from .compat.py39 import import_helper, os_helper
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@contextlib.contextmanager
def suppress_known_deprecation():
    """
    Record (rather than surface) DeprecationWarnings, yielding the
    list of captured warning records for inspection.
    """
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('default', category=DeprecationWarning)
        yield caught
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class FilesTests:
    """
    Shared assertions for the files() Traversable API.

    ``self.data`` is not defined here; the concrete subclass mixes in a
    setup class (e.g. util.DiskSetup / util.ZipSetup) that provides it.
    """

    def test_read_bytes(self):
        files = resources.files(self.data)
        actual = files.joinpath('utf-8.file').read_bytes()
        assert actual == b'Hello, UTF-8 world!\n'

    def test_read_text(self):
        files = resources.files(self.data)
        actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
        assert actual == 'Hello, UTF-8 world!\n'

    def test_traversable(self):
        # files() must honor the Traversable protocol.
        assert isinstance(resources.files(self.data), Traversable)

    def test_joinpath_with_multiple_args(self):
        # joinpath accepts multiple segments, not just one.
        files = resources.files(self.data)
        binfile = files.joinpath('subdirectory', 'binary.file')
        self.assertTrue(binfile.is_file())

    def test_old_parameter(self):
        """
        Files used to take a 'package' parameter. Make sure anyone
        passing by name is still supported.
        """
        with suppress_known_deprecation():
            resources.files(package=self.data)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class OpenDiskTests(FilesTests, util.DiskSetup, unittest.TestCase):
    """Run FilesTests against a package materialized on the file system."""
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
    """Run FilesTests against a package packed into a zip archive."""
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class OpenNamespaceTests(FilesTests, util.DiskSetup, unittest.TestCase):
    """Run FilesTests against a namespace package on disk."""

    MODULE = 'namespacedata01'

    def test_non_paths_in_dunder_path(self):
        """
        Non-path items in a namespace package's ``__path__`` are ignored.

        As reported in python/importlib_resources#311, some tools
        like Setuptools, when creating editable packages, will inject
        non-paths into a namespace package's ``__path__``, a
        sentinel like
        ``__editable__.sample_namespace-1.0.finder.__path_hook__``
        to cause the ``PathEntryFinder`` to be called when searching
        for packages. In that case, resources should still be loadable.
        """
        import namespacedata01  # type: ignore[import-not-found]

        namespacedata01.__path__.append(
            '__editable__.sample_namespace-1.0.finder.__path_hook__'
        )

        # Must not raise despite the bogus __path__ entry.
        resources.files(namespacedata01)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
    """Run FilesTests against a namespace package inside a zip archive."""

    ZIP_MODULE = 'namespacedata01'
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class DirectSpec:
    """
    Override behavior of ModuleSetup to write a full spec directly.
    """

    # Placeholder; fixture content comes from ``self.spec`` instead.
    MODULE = 'unused'

    def load_fixture(self, name):
        # Ignore ``name``; materialize the declared tree (tree_on_path is
        # supplied by the Setup mixin).
        self.tree_on_path(self.spec)
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class ModulesFiles:
    """Fixture tree: a bare module with a resource file beside it."""

    spec = {
        'mod.py': '',
        'res.txt': 'resources are the best',
    }

    def test_module_resources(self):
        """
        A module can have resources found adjacent to the module.
        """
        import mod  # type: ignore[import-not-found]

        res = resources.files(mod).joinpath('res.txt')
        assert res.read_text(encoding='utf-8') == self.spec['res.txt']
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class ModuleFilesDiskTests(DirectSpec, util.DiskSetup, ModulesFiles, unittest.TestCase):
    """Module-adjacent resources with the fixture tree on disk."""
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class ModuleFilesZipTests(DirectSpec, util.ZipSetup, ModulesFiles, unittest.TestCase):
    """Module-adjacent resources with the fixture tree zipped."""
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class ImplicitContextFiles:
    """
    Fixtures whose modules call ``files()`` with no argument, so the
    anchor must be inferred from the calling module.
    """

    # Source injected into fixture modules; each resolves its own resource
    # at import time and stores the text in ``val``.
    set_val = textwrap.dedent(
        f"""
        import {resources.__name__} as res
        val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
        """
    )
    spec = {
        'somepkg': {
            '__init__.py': set_val,
            'submod.py': set_val,
            'res.txt': 'resources are the best',
        },
        'frozenpkg': {
            # frozenpkg imports the compiled-only copy built below.
            '__init__.py': set_val.replace(resources.__name__, 'c_resources'),
            'res.txt': 'resources are the best',
        },
    }

    def test_implicit_files_package(self):
        """
        Without any parameter, files() will infer the location as the caller.
        """
        assert importlib.import_module('somepkg').val == 'resources are the best'

    def test_implicit_files_submodule(self):
        """
        Without any parameter, files() will infer the location as the caller.
        """
        assert importlib.import_module('somepkg.submod').val == 'resources are the best'

    def _compile_importlib(self):
        """
        Make a compiled-only copy of the importlib resources package.

        Currently only code is copied, as importlib resources doesn't itself
        have any resources.
        """
        bin_site = self.fixtures.enter_context(os_helper.temp_dir())
        c_resources = pathlib.Path(bin_site, 'c_resources')
        sources = pathlib.Path(resources.__file__).parent

        # Mirror every .py as a .pyc under the temp site dir.
        for source_path in sources.glob('**/*.py'):
            c_path = c_resources.joinpath(source_path.relative_to(sources)).with_suffix(
                '.pyc'
            )
            py_compile.compile(source_path, c_path)
        self.fixtures.enter_context(import_helper.DirsOnSysPath(bin_site))

    def test_implicit_files_with_compiled_importlib(self):
        """
        Caller detection works for compiled-only resources module.

        python/cpython#123085
        """
        self._compile_importlib()
        assert importlib.import_module('frozenpkg').val == 'resources are the best'
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
class ImplicitContextFilesDiskTests(
    DirectSpec, util.DiskSetup, ImplicitContextFiles, unittest.TestCase
):
    """Implicit-caller resolution with the fixture tree on disk."""
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
class ImplicitContextFilesZipTests(
    DirectSpec, util.ZipSetup, ImplicitContextFiles, unittest.TestCase
):
    """Implicit-caller resolution with the fixture tree zipped."""
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
# Support running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
lib/python3.10/site-packages/importlib_resources/tests/test_functional.py
ADDED
|
@@ -0,0 +1,267 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
|
| 2 |
+
import os
|
| 3 |
+
import unittest
|
| 4 |
+
|
| 5 |
+
import importlib_resources as resources
|
| 6 |
+
|
| 7 |
+
from . import util
|
| 8 |
+
from .compat.py39 import warnings_helper
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class StringAnchorMixin:
    """Anchors expressed as dotted-name strings."""

    anchor01 = 'data01'
    anchor02 = 'data02'
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class ModuleAnchorMixin:
    """Anchors expressed as freshly-imported module objects."""

    @property
    def anchor01(self):
        # Imported lazily so the fixture is on sys.path by access time.
        return importlib.import_module('data01')

    @property
    def anchor02(self):
        return importlib.import_module('data02')
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class FunctionalAPIBase:
    """
    Exercise the functional API (read_text, read_binary, open_text,
    open_binary, path, is_resource, contents).

    ``self.anchor01``/``self.anchor02`` come from an *AnchorMixin;
    ``load_fixture`` comes from a util Setup mixin.
    """

    def setUp(self):
        super().setUp()
        # The Setup mixin installs anchor01's fixture; add data02 alongside.
        self.load_fixture('data02')

    def _gen_resourcetxt_path_parts(self):
        """Yield various names of a text file in anchor02, each in a subTest"""
        for path_parts in (
            ('subdirectory', 'subsubdir', 'resource.txt'),
            ('subdirectory/subsubdir/resource.txt',),
            ('subdirectory/subsubdir', 'resource.txt'),
        ):
            with self.subTest(path_parts=path_parts):
                yield path_parts

    def assertEndsWith(self, string, suffix):
        """Assert that `string` ends with `suffix`.

        Used to ignore an architecture-specific UTF-16 byte-order mark."""
        self.assertEqual(string[-len(suffix) :], suffix)

    def test_read_text(self):
        self.assertEqual(
            resources.read_text(self.anchor01, 'utf-8.file'),
            'Hello, UTF-8 world!\n',
        )
        self.assertEqual(
            resources.read_text(
                self.anchor02,
                'subdirectory',
                'subsubdir',
                'resource.txt',
                encoding='utf-8',
            ),
            'a resource',
        )
        # Every spelling of the nested path must resolve identically.
        for path_parts in self._gen_resourcetxt_path_parts():
            self.assertEqual(
                resources.read_text(
                    self.anchor02,
                    *path_parts,
                    encoding='utf-8',
                ),
                'a resource',
            )
        # Use generic OSError, since e.g. attempting to read a directory can
        # fail with PermissionError rather than IsADirectoryError
        with self.assertRaises(OSError):
            resources.read_text(self.anchor01)
        with self.assertRaises((OSError, resources.abc.TraversalError)):
            resources.read_text(self.anchor01, 'no-such-file')
        with self.assertRaises(UnicodeDecodeError):
            resources.read_text(self.anchor01, 'utf-16.file')
        self.assertEqual(
            resources.read_text(
                self.anchor01,
                'binary.file',
                encoding='latin1',
            ),
            '\x00\x01\x02\x03',
        )
        self.assertEndsWith(  # ignore the BOM
            resources.read_text(
                self.anchor01,
                'utf-16.file',
                errors='backslashreplace',
            ),
            'Hello, UTF-16 world!\n'.encode('utf-16-le').decode(
                errors='backslashreplace',
            ),
        )

    def test_read_binary(self):
        self.assertEqual(
            resources.read_binary(self.anchor01, 'utf-8.file'),
            b'Hello, UTF-8 world!\n',
        )
        for path_parts in self._gen_resourcetxt_path_parts():
            self.assertEqual(
                resources.read_binary(self.anchor02, *path_parts),
                b'a resource',
            )

    def test_open_text(self):
        with resources.open_text(self.anchor01, 'utf-8.file') as f:
            self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
        for path_parts in self._gen_resourcetxt_path_parts():
            with resources.open_text(
                self.anchor02,
                *path_parts,
                encoding='utf-8',
            ) as f:
                self.assertEqual(f.read(), 'a resource')
        # Use generic OSError, since e.g. attempting to read a directory can
        # fail with PermissionError rather than IsADirectoryError
        with self.assertRaises(OSError):
            resources.open_text(self.anchor01)
        with self.assertRaises((OSError, resources.abc.TraversalError)):
            resources.open_text(self.anchor01, 'no-such-file')
        # Decode errors surface at read() time, not at open().
        with resources.open_text(self.anchor01, 'utf-16.file') as f:
            with self.assertRaises(UnicodeDecodeError):
                f.read()
        with resources.open_text(
            self.anchor01,
            'binary.file',
            encoding='latin1',
        ) as f:
            self.assertEqual(f.read(), '\x00\x01\x02\x03')
        with resources.open_text(
            self.anchor01,
            'utf-16.file',
            errors='backslashreplace',
        ) as f:
            self.assertEndsWith(  # ignore the BOM
                f.read(),
                'Hello, UTF-16 world!\n'.encode('utf-16-le').decode(
                    errors='backslashreplace',
                ),
            )

    def test_open_binary(self):
        with resources.open_binary(self.anchor01, 'utf-8.file') as f:
            self.assertEqual(f.read(), b'Hello, UTF-8 world!\n')
        for path_parts in self._gen_resourcetxt_path_parts():
            with resources.open_binary(
                self.anchor02,
                *path_parts,
            ) as f:
                self.assertEqual(f.read(), b'a resource')

    def test_path(self):
        with resources.path(self.anchor01, 'utf-8.file') as path:
            with open(str(path), encoding='utf-8') as f:
                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
        # path() with no resource argument yields the anchor's directory.
        with resources.path(self.anchor01) as path:
            with open(os.path.join(path, 'utf-8.file'), encoding='utf-8') as f:
                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')

    def test_is_resource(self):
        is_resource = resources.is_resource
        self.assertTrue(is_resource(self.anchor01, 'utf-8.file'))
        self.assertFalse(is_resource(self.anchor01, 'no_such_file'))
        self.assertFalse(is_resource(self.anchor01))
        # Directories are not resources.
        self.assertFalse(is_resource(self.anchor01, 'subdirectory'))
        for path_parts in self._gen_resourcetxt_path_parts():
            self.assertTrue(is_resource(self.anchor02, *path_parts))

    def test_contents(self):
        # contents() is deprecated; every call must warn.
        with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)):
            c = resources.contents(self.anchor01)
        self.assertGreaterEqual(
            set(c),
            {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'},
        )
        with (
            self.assertRaises(OSError),
            warnings_helper.check_warnings((
                ".*contents.*",
                DeprecationWarning,
            )),
        ):
            list(resources.contents(self.anchor01, 'utf-8.file'))

        for path_parts in self._gen_resourcetxt_path_parts():
            with (
                self.assertRaises((OSError, resources.abc.TraversalError)),
                warnings_helper.check_warnings((
                    ".*contents.*",
                    DeprecationWarning,
                )),
            ):
                list(resources.contents(self.anchor01, *path_parts))
        with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)):
            c = resources.contents(self.anchor01, 'subdirectory')
        self.assertGreaterEqual(
            set(c),
            {'binary.file'},
        )

    @warnings_helper.ignore_warnings(category=DeprecationWarning)
    def test_common_errors(self):
        for func in (
            resources.read_text,
            resources.read_binary,
            resources.open_text,
            resources.open_binary,
            resources.path,
            resources.is_resource,
            resources.contents,
        ):
            with self.subTest(func=func):
                # Rejecting None anchor
                with self.assertRaises(TypeError):
                    func(None)
                # Rejecting invalid anchor type
                with self.assertRaises((TypeError, AttributeError)):
                    func(1234)
                # Unknown module
                with self.assertRaises(ModuleNotFoundError):
                    func('$missing module$')

    def test_text_errors(self):
        for func in (
            resources.read_text,
            resources.open_text,
        ):
            with self.subTest(func=func):
                # Multiple path arguments need explicit encoding argument.
                with self.assertRaises(TypeError):
                    func(
                        self.anchor02,
                        'subdirectory',
                        'subsubdir',
                        'resource.txt',
                    )
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
class FunctionalAPITest_StringAnchor_Disk(
    StringAnchorMixin,
    FunctionalAPIBase,
    util.DiskSetup,
    unittest.TestCase,
):
    """String anchors against on-disk fixtures."""
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
class FunctionalAPITest_ModuleAnchor_Disk(
    ModuleAnchorMixin,
    FunctionalAPIBase,
    util.DiskSetup,
    unittest.TestCase,
):
    """Module-object anchors against on-disk fixtures."""
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
class FunctionalAPITest_StringAnchor_Memory(
    StringAnchorMixin,
    FunctionalAPIBase,
    util.MemorySetup,
    unittest.TestCase,
):
    """String anchors against in-memory fixtures."""
|
lib/python3.10/site-packages/importlib_resources/tests/test_path.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import pathlib
|
| 3 |
+
import unittest
|
| 4 |
+
|
| 5 |
+
import importlib_resources as resources
|
| 6 |
+
|
| 7 |
+
from . import util
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class CommonTests(util.CommonTests, unittest.TestCase):
    """Drive the shared CommonTests matrix through as_file()."""

    def execute(self, package, path):
        # Materialize (and immediately discard) a concrete path.
        with resources.as_file(resources.files(package).joinpath(path)):
            pass
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class PathTests:
    """Shared as_file() assertions; ``self.data`` comes from the setup mixin."""

    def test_reading(self):
        """
        as_file() should yield a readable, concrete pathlib.Path.
        """
        traversable = resources.files(self.data) / 'utf-8.file'
        with resources.as_file(traversable) as concrete:
            self.assertIsInstance(concrete, pathlib.Path)
            self.assertTrue(concrete.name.endswith("utf-8.file"), repr(concrete))
            self.assertEqual(
                'Hello, UTF-8 world!\n', concrete.read_text(encoding='utf-8')
            )
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class PathDiskTests(PathTests, util.DiskSetup, unittest.TestCase):
    def test_natural_path(self):
        """
        Internal implementation detail: resources already backed by the
        file system are handed out in place, never copied to a tempdir.
        """
        traversable = resources.files(self.data) / 'utf-8.file'
        with resources.as_file(traversable) as concrete:
            # The fixture lives under a 'data' directory, not a tempdir.
            assert 'data' in str(concrete)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class PathMemoryTests(PathTests, unittest.TestCase):
    """as_file() against a package that exists only in memory."""

    def setUp(self):
        buffer = io.BytesIO(b'Hello, UTF-8 world!\n')
        self.addCleanup(buffer.close)
        self.data = util.create_package(
            file=buffer, path=FileNotFoundError("package exists only in memory")
        )
        # Scrub location metadata so nothing resolves to the file system.
        self.data.__spec__.origin = None
        self.data.__spec__.has_location = False
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
    def test_remove_in_context_manager(self):
        """
        Deleting the temporarily-stashed file inside the ``with`` stanza
        must not be an error.
        """
        traversable = resources.files(self.data) / 'utf-8.file'
        with resources.as_file(traversable) as extracted:
            extracted.unlink()
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# Support running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
lib/python3.10/site-packages/importlib_resources/tests/test_read.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import unittest
|
| 2 |
+
from importlib import import_module
|
| 3 |
+
|
| 4 |
+
import importlib_resources as resources
|
| 5 |
+
|
| 6 |
+
from . import util
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class CommonBinaryTests(util.CommonTests, unittest.TestCase):
    """Drive the shared test matrix through read_bytes()."""

    def execute(self, package, path):
        resources.files(package).joinpath(path).read_bytes()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class CommonTextTests(util.CommonTests, unittest.TestCase):
    """Drive the shared test matrix through read_text()."""

    def execute(self, package, path):
        resources.files(package).joinpath(path).read_text(encoding='utf-8')
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class ReadTests:
    """
    Shared read_bytes/read_text assertions; ``self.data`` is supplied by
    the setup mixin on the concrete subclass.
    """

    def test_read_bytes(self):
        result = resources.files(self.data).joinpath('binary.file').read_bytes()
        # binary.file holds the bytes 0x00..0x03.
        self.assertEqual(result, bytes(range(4)))

    def test_read_text_default_encoding(self):
        result = (
            resources.files(self.data)
            .joinpath('utf-8.file')
            .read_text(encoding='utf-8')
        )
        self.assertEqual(result, 'Hello, UTF-8 world!\n')

    def test_read_text_given_encoding(self):
        result = (
            resources.files(self.data)
            .joinpath('utf-16.file')
            .read_text(encoding='utf-16')
        )
        self.assertEqual(result, 'Hello, UTF-16 world!\n')

    def test_read_text_with_errors(self):
        """
        Raises UnicodeError without the 'errors' argument.
        """
        target = resources.files(self.data) / 'utf-16.file'
        self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
        # With errors='ignore', the UTF-16 payload decodes to interleaved NULs.
        result = target.read_text(encoding='utf-8', errors='ignore')
        self.assertEqual(
            result,
            'H\x00e\x00l\x00l\x00o\x00,\x00 '
            '\x00U\x00T\x00F\x00-\x001\x006\x00 '
            '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
        )
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class ReadDiskTests(ReadTests, util.DiskSetup, unittest.TestCase):
    """Run ReadTests against on-disk fixtures."""
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
    """ReadTests against zipped data, plus submodule-resource lookups."""

    def test_read_submodule_resource(self):
        submodule = import_module('data01.subdirectory')
        payload = resources.files(submodule).joinpath('binary.file').read_bytes()
        self.assertEqual(payload, bytes(range(4, 8)))

    def test_read_submodule_resource_by_name(self):
        anchor = resources.files('data01.subdirectory')
        payload = anchor.joinpath('binary.file').read_bytes()
        self.assertEqual(payload, bytes(range(4, 8)))
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class ReadNamespaceTests(ReadTests, util.DiskSetup, unittest.TestCase):
    """Run ReadTests against a namespace package on disk."""

    MODULE = 'namespacedata01'
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
    """ReadTests against a zipped namespace package."""

    MODULE = 'namespacedata01'

    def test_read_submodule_resource(self):
        submodule = import_module('namespacedata01.subdirectory')
        payload = resources.files(submodule).joinpath('binary.file').read_bytes()
        self.assertEqual(payload, bytes(range(12, 16)))

    def test_read_submodule_resource_by_name(self):
        anchor = resources.files('namespacedata01.subdirectory')
        payload = anchor.joinpath('binary.file').read_bytes()
        self.assertEqual(payload, bytes(range(12, 16)))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
if __name__ == '__main__':
|
| 94 |
+
unittest.main()
|
lib/python3.10/site-packages/importlib_resources/tests/test_reader.py
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os.path
|
| 2 |
+
import pathlib
|
| 3 |
+
import unittest
|
| 4 |
+
from importlib import import_module
|
| 5 |
+
|
| 6 |
+
from importlib_resources.readers import MultiplexedPath, NamespaceReader
|
| 7 |
+
|
| 8 |
+
from . import util
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class MultiplexedPathTest(util.DiskSetup, unittest.TestCase):
|
| 12 |
+
MODULE = 'namespacedata01'
|
| 13 |
+
|
| 14 |
+
def setUp(self):
|
| 15 |
+
super().setUp()
|
| 16 |
+
self.folder = pathlib.Path(self.data.__path__[0])
|
| 17 |
+
self.data01 = pathlib.Path(self.load_fixture('data01').__file__).parent
|
| 18 |
+
self.data02 = pathlib.Path(self.load_fixture('data02').__file__).parent
|
| 19 |
+
|
| 20 |
+
def test_init_no_paths(self):
|
| 21 |
+
with self.assertRaises(FileNotFoundError):
|
| 22 |
+
MultiplexedPath()
|
| 23 |
+
|
| 24 |
+
def test_init_file(self):
|
| 25 |
+
with self.assertRaises(NotADirectoryError):
|
| 26 |
+
MultiplexedPath(self.folder / 'binary.file')
|
| 27 |
+
|
| 28 |
+
def test_iterdir(self):
|
| 29 |
+
contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
|
| 30 |
+
try:
|
| 31 |
+
contents.remove('__pycache__')
|
| 32 |
+
except (KeyError, ValueError):
|
| 33 |
+
pass
|
| 34 |
+
self.assertEqual(
|
| 35 |
+
contents, {'subdirectory', 'binary.file', 'utf-16.file', 'utf-8.file'}
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
def test_iterdir_duplicate(self):
|
| 39 |
+
contents = {
|
| 40 |
+
path.name for path in MultiplexedPath(self.folder, self.data01).iterdir()
|
| 41 |
+
}
|
| 42 |
+
for remove in ('__pycache__', '__init__.pyc'):
|
| 43 |
+
try:
|
| 44 |
+
contents.remove(remove)
|
| 45 |
+
except (KeyError, ValueError):
|
| 46 |
+
pass
|
| 47 |
+
self.assertEqual(
|
| 48 |
+
contents,
|
| 49 |
+
{'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'},
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
def test_is_dir(self):
|
| 53 |
+
self.assertEqual(MultiplexedPath(self.folder).is_dir(), True)
|
| 54 |
+
|
| 55 |
+
def test_is_file(self):
|
| 56 |
+
self.assertEqual(MultiplexedPath(self.folder).is_file(), False)
|
| 57 |
+
|
| 58 |
+
def test_open_file(self):
|
| 59 |
+
path = MultiplexedPath(self.folder)
|
| 60 |
+
with self.assertRaises(FileNotFoundError):
|
| 61 |
+
path.read_bytes()
|
| 62 |
+
with self.assertRaises(FileNotFoundError):
|
| 63 |
+
path.read_text()
|
| 64 |
+
with self.assertRaises(FileNotFoundError):
|
| 65 |
+
path.open()
|
| 66 |
+
|
| 67 |
+
def test_join_path(self):
|
| 68 |
+
prefix = str(self.folder.parent)
|
| 69 |
+
path = MultiplexedPath(self.folder, self.data01)
|
| 70 |
+
self.assertEqual(
|
| 71 |
+
str(path.joinpath('binary.file'))[len(prefix) + 1 :],
|
| 72 |
+
os.path.join('namespacedata01', 'binary.file'),
|
| 73 |
+
)
|
| 74 |
+
sub = path.joinpath('subdirectory')
|
| 75 |
+
assert isinstance(sub, MultiplexedPath)
|
| 76 |
+
assert 'namespacedata01' in str(sub)
|
| 77 |
+
assert 'data01' in str(sub)
|
| 78 |
+
self.assertEqual(
|
| 79 |
+
str(path.joinpath('imaginary'))[len(prefix) + 1 :],
|
| 80 |
+
os.path.join('namespacedata01', 'imaginary'),
|
| 81 |
+
)
|
| 82 |
+
self.assertEqual(path.joinpath(), path)
|
| 83 |
+
|
| 84 |
+
def test_join_path_compound(self):
|
| 85 |
+
path = MultiplexedPath(self.folder)
|
| 86 |
+
assert not path.joinpath('imaginary/foo.py').exists()
|
| 87 |
+
|
| 88 |
+
def test_join_path_common_subdir(self):
|
| 89 |
+
prefix = str(self.data02.parent)
|
| 90 |
+
path = MultiplexedPath(self.data01, self.data02)
|
| 91 |
+
self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath)
|
| 92 |
+
self.assertEqual(
|
| 93 |
+
str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :],
|
| 94 |
+
os.path.join('data02', 'subdirectory', 'subsubdir'),
|
| 95 |
+
)
|
| 96 |
+
|
| 97 |
+
def test_repr(self):
|
| 98 |
+
self.assertEqual(
|
| 99 |
+
repr(MultiplexedPath(self.folder)),
|
| 100 |
+
f"MultiplexedPath('{self.folder}')",
|
| 101 |
+
)
|
| 102 |
+
|
| 103 |
+
def test_name(self):
|
| 104 |
+
self.assertEqual(
|
| 105 |
+
MultiplexedPath(self.folder).name,
|
| 106 |
+
os.path.basename(self.folder),
|
| 107 |
+
)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class NamespaceReaderTest(util.DiskSetup, unittest.TestCase):
|
| 111 |
+
MODULE = 'namespacedata01'
|
| 112 |
+
|
| 113 |
+
def test_init_error(self):
|
| 114 |
+
with self.assertRaises(ValueError):
|
| 115 |
+
NamespaceReader(['path1', 'path2'])
|
| 116 |
+
|
| 117 |
+
def test_resource_path(self):
|
| 118 |
+
namespacedata01 = import_module('namespacedata01')
|
| 119 |
+
reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
|
| 120 |
+
|
| 121 |
+
root = self.data.__path__[0]
|
| 122 |
+
self.assertEqual(
|
| 123 |
+
reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
|
| 124 |
+
)
|
| 125 |
+
self.assertEqual(
|
| 126 |
+
reader.resource_path('imaginary'), os.path.join(root, 'imaginary')
|
| 127 |
+
)
|
| 128 |
+
|
| 129 |
+
def test_files(self):
|
| 130 |
+
reader = NamespaceReader(self.data.__spec__.submodule_search_locations)
|
| 131 |
+
root = self.data.__path__[0]
|
| 132 |
+
self.assertIsInstance(reader.files(), MultiplexedPath)
|
| 133 |
+
self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
if __name__ == '__main__':
|
| 137 |
+
unittest.main()
|
lib/python3.10/site-packages/importlib_resources/tests/test_resource.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import unittest
|
| 2 |
+
from importlib import import_module
|
| 3 |
+
|
| 4 |
+
import importlib_resources as resources
|
| 5 |
+
|
| 6 |
+
from . import util
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class ResourceTests:
|
| 10 |
+
# Subclasses are expected to set the `data` attribute.
|
| 11 |
+
|
| 12 |
+
def test_is_file_exists(self):
|
| 13 |
+
target = resources.files(self.data) / 'binary.file'
|
| 14 |
+
self.assertTrue(target.is_file())
|
| 15 |
+
|
| 16 |
+
def test_is_file_missing(self):
|
| 17 |
+
target = resources.files(self.data) / 'not-a-file'
|
| 18 |
+
self.assertFalse(target.is_file())
|
| 19 |
+
|
| 20 |
+
def test_is_dir(self):
|
| 21 |
+
target = resources.files(self.data) / 'subdirectory'
|
| 22 |
+
self.assertFalse(target.is_file())
|
| 23 |
+
self.assertTrue(target.is_dir())
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class ResourceDiskTests(ResourceTests, util.DiskSetup, unittest.TestCase):
|
| 27 |
+
pass
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
|
| 31 |
+
pass
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def names(traversable):
|
| 35 |
+
return {item.name for item in traversable.iterdir()}
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class ResourceLoaderTests(util.DiskSetup, unittest.TestCase):
|
| 39 |
+
def test_resource_contents(self):
|
| 40 |
+
package = util.create_package(
|
| 41 |
+
file=self.data, path=self.data.__file__, contents=['A', 'B', 'C']
|
| 42 |
+
)
|
| 43 |
+
self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})
|
| 44 |
+
|
| 45 |
+
def test_is_file(self):
|
| 46 |
+
package = util.create_package(
|
| 47 |
+
file=self.data,
|
| 48 |
+
path=self.data.__file__,
|
| 49 |
+
contents=['A', 'B', 'C', 'D/E', 'D/F'],
|
| 50 |
+
)
|
| 51 |
+
self.assertTrue(resources.files(package).joinpath('B').is_file())
|
| 52 |
+
|
| 53 |
+
def test_is_dir(self):
|
| 54 |
+
package = util.create_package(
|
| 55 |
+
file=self.data,
|
| 56 |
+
path=self.data.__file__,
|
| 57 |
+
contents=['A', 'B', 'C', 'D/E', 'D/F'],
|
| 58 |
+
)
|
| 59 |
+
self.assertTrue(resources.files(package).joinpath('D').is_dir())
|
| 60 |
+
|
| 61 |
+
def test_resource_missing(self):
|
| 62 |
+
package = util.create_package(
|
| 63 |
+
file=self.data,
|
| 64 |
+
path=self.data.__file__,
|
| 65 |
+
contents=['A', 'B', 'C', 'D/E', 'D/F'],
|
| 66 |
+
)
|
| 67 |
+
self.assertFalse(resources.files(package).joinpath('Z').is_file())
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class ResourceCornerCaseTests(util.DiskSetup, unittest.TestCase):
|
| 71 |
+
def test_package_has_no_reader_fallback(self):
|
| 72 |
+
"""
|
| 73 |
+
Test odd ball packages which:
|
| 74 |
+
# 1. Do not have a ResourceReader as a loader
|
| 75 |
+
# 2. Are not on the file system
|
| 76 |
+
# 3. Are not in a zip file
|
| 77 |
+
"""
|
| 78 |
+
module = util.create_package(
|
| 79 |
+
file=self.data, path=self.data.__file__, contents=['A', 'B', 'C']
|
| 80 |
+
)
|
| 81 |
+
# Give the module a dummy loader.
|
| 82 |
+
module.__loader__ = object()
|
| 83 |
+
# Give the module a dummy origin.
|
| 84 |
+
module.__file__ = '/path/which/shall/not/be/named'
|
| 85 |
+
module.__spec__.loader = module.__loader__
|
| 86 |
+
module.__spec__.origin = module.__file__
|
| 87 |
+
self.assertFalse(resources.files(module).joinpath('A').is_file())
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class ResourceFromZipsTest01(util.ZipSetup, unittest.TestCase):
|
| 91 |
+
def test_is_submodule_resource(self):
|
| 92 |
+
submodule = import_module('data01.subdirectory')
|
| 93 |
+
self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
|
| 94 |
+
|
| 95 |
+
def test_read_submodule_resource_by_name(self):
|
| 96 |
+
self.assertTrue(
|
| 97 |
+
resources.files('data01.subdirectory').joinpath('binary.file').is_file()
|
| 98 |
+
)
|
| 99 |
+
|
| 100 |
+
def test_submodule_contents(self):
|
| 101 |
+
submodule = import_module('data01.subdirectory')
|
| 102 |
+
self.assertEqual(
|
| 103 |
+
names(resources.files(submodule)), {'__init__.py', 'binary.file'}
|
| 104 |
+
)
|
| 105 |
+
|
| 106 |
+
def test_submodule_contents_by_name(self):
|
| 107 |
+
self.assertEqual(
|
| 108 |
+
names(resources.files('data01.subdirectory')),
|
| 109 |
+
{'__init__.py', 'binary.file'},
|
| 110 |
+
)
|
| 111 |
+
|
| 112 |
+
def test_as_file_directory(self):
|
| 113 |
+
with resources.as_file(resources.files('data01')) as data:
|
| 114 |
+
assert data.name == 'data01'
|
| 115 |
+
assert data.is_dir()
|
| 116 |
+
assert data.joinpath('subdirectory').is_dir()
|
| 117 |
+
assert len(list(data.iterdir()))
|
| 118 |
+
assert not data.parent.exists()
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
class ResourceFromZipsTest02(util.ZipSetup, unittest.TestCase):
|
| 122 |
+
MODULE = 'data02'
|
| 123 |
+
|
| 124 |
+
def test_unrelated_contents(self):
|
| 125 |
+
"""
|
| 126 |
+
Test thata zip with two unrelated subpackages return
|
| 127 |
+
distinct resources. Ref python/importlib_resources#44.
|
| 128 |
+
"""
|
| 129 |
+
self.assertEqual(
|
| 130 |
+
names(resources.files('data02.one')),
|
| 131 |
+
{'__init__.py', 'resource1.txt'},
|
| 132 |
+
)
|
| 133 |
+
self.assertEqual(
|
| 134 |
+
names(resources.files('data02.two')),
|
| 135 |
+
{'__init__.py', 'resource2.txt'},
|
| 136 |
+
)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class DeletingZipsTest(util.ZipSetup, unittest.TestCase):
|
| 140 |
+
"""Having accessed resources in a zip file should not keep an open
|
| 141 |
+
reference to the zip.
|
| 142 |
+
"""
|
| 143 |
+
|
| 144 |
+
def test_iterdir_does_not_keep_open(self):
|
| 145 |
+
[item.name for item in resources.files('data01').iterdir()]
|
| 146 |
+
|
| 147 |
+
def test_is_file_does_not_keep_open(self):
|
| 148 |
+
resources.files('data01').joinpath('binary.file').is_file()
|
| 149 |
+
|
| 150 |
+
def test_is_file_failure_does_not_keep_open(self):
|
| 151 |
+
resources.files('data01').joinpath('not-present').is_file()
|
| 152 |
+
|
| 153 |
+
@unittest.skip("Desired but not supported.")
|
| 154 |
+
def test_as_file_does_not_keep_open(self): # pragma: no cover
|
| 155 |
+
resources.as_file(resources.files('data01') / 'binary.file')
|
| 156 |
+
|
| 157 |
+
def test_entered_path_does_not_keep_open(self):
|
| 158 |
+
"""
|
| 159 |
+
Mimic what certifi does on import to make its bundle
|
| 160 |
+
available for the process duration.
|
| 161 |
+
"""
|
| 162 |
+
resources.as_file(resources.files('data01') / 'binary.file').__enter__()
|
| 163 |
+
|
| 164 |
+
def test_read_binary_does_not_keep_open(self):
|
| 165 |
+
resources.files('data01').joinpath('binary.file').read_bytes()
|
| 166 |
+
|
| 167 |
+
def test_read_text_does_not_keep_open(self):
|
| 168 |
+
resources.files('data01').joinpath('utf-8.file').read_text(encoding='utf-8')
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
class ResourceFromNamespaceTests:
|
| 172 |
+
def test_is_submodule_resource(self):
|
| 173 |
+
self.assertTrue(
|
| 174 |
+
resources.files(import_module('namespacedata01'))
|
| 175 |
+
.joinpath('binary.file')
|
| 176 |
+
.is_file()
|
| 177 |
+
)
|
| 178 |
+
|
| 179 |
+
def test_read_submodule_resource_by_name(self):
|
| 180 |
+
self.assertTrue(
|
| 181 |
+
resources.files('namespacedata01').joinpath('binary.file').is_file()
|
| 182 |
+
)
|
| 183 |
+
|
| 184 |
+
def test_submodule_contents(self):
|
| 185 |
+
contents = names(resources.files(import_module('namespacedata01')))
|
| 186 |
+
try:
|
| 187 |
+
contents.remove('__pycache__')
|
| 188 |
+
except KeyError:
|
| 189 |
+
pass
|
| 190 |
+
self.assertEqual(
|
| 191 |
+
contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
|
| 192 |
+
)
|
| 193 |
+
|
| 194 |
+
def test_submodule_contents_by_name(self):
|
| 195 |
+
contents = names(resources.files('namespacedata01'))
|
| 196 |
+
try:
|
| 197 |
+
contents.remove('__pycache__')
|
| 198 |
+
except KeyError:
|
| 199 |
+
pass
|
| 200 |
+
self.assertEqual(
|
| 201 |
+
contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
|
| 202 |
+
)
|
| 203 |
+
|
| 204 |
+
def test_submodule_sub_contents(self):
|
| 205 |
+
contents = names(resources.files(import_module('namespacedata01.subdirectory')))
|
| 206 |
+
try:
|
| 207 |
+
contents.remove('__pycache__')
|
| 208 |
+
except KeyError:
|
| 209 |
+
pass
|
| 210 |
+
self.assertEqual(contents, {'binary.file'})
|
| 211 |
+
|
| 212 |
+
def test_submodule_sub_contents_by_name(self):
|
| 213 |
+
contents = names(resources.files('namespacedata01.subdirectory'))
|
| 214 |
+
try:
|
| 215 |
+
contents.remove('__pycache__')
|
| 216 |
+
except KeyError:
|
| 217 |
+
pass
|
| 218 |
+
self.assertEqual(contents, {'binary.file'})
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
class ResourceFromNamespaceDiskTests(
|
| 222 |
+
util.DiskSetup,
|
| 223 |
+
ResourceFromNamespaceTests,
|
| 224 |
+
unittest.TestCase,
|
| 225 |
+
):
|
| 226 |
+
MODULE = 'namespacedata01'
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
class ResourceFromNamespaceZipTests(
|
| 230 |
+
util.ZipSetup,
|
| 231 |
+
ResourceFromNamespaceTests,
|
| 232 |
+
unittest.TestCase,
|
| 233 |
+
):
|
| 234 |
+
MODULE = 'namespacedata01'
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
if __name__ == '__main__':
|
| 238 |
+
unittest.main()
|
lib/python3.10/site-packages/importlib_resources/tests/test_util.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import unittest
|
| 2 |
+
|
| 3 |
+
from .util import MemorySetup, Traversable
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestMemoryTraversableImplementation(unittest.TestCase):
|
| 7 |
+
def test_concrete_methods_are_not_overridden(self):
|
| 8 |
+
"""`MemoryTraversable` must not override `Traversable` concrete methods.
|
| 9 |
+
|
| 10 |
+
This test is not an attempt to enforce a particular `Traversable` protocol;
|
| 11 |
+
it merely catches changes in the `Traversable` abstract/concrete methods
|
| 12 |
+
that have not been mirrored in the `MemoryTraversable` subclass.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
traversable_concrete_methods = {
|
| 16 |
+
method
|
| 17 |
+
for method, value in Traversable.__dict__.items()
|
| 18 |
+
if callable(value) and method not in Traversable.__abstractmethods__
|
| 19 |
+
}
|
| 20 |
+
memory_traversable_concrete_methods = {
|
| 21 |
+
method
|
| 22 |
+
for method, value in MemorySetup.MemoryTraversable.__dict__.items()
|
| 23 |
+
if callable(value) and not method.startswith("__")
|
| 24 |
+
}
|
| 25 |
+
overridden_methods = (
|
| 26 |
+
memory_traversable_concrete_methods & traversable_concrete_methods
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
assert not overridden_methods
|
lib/python3.10/site-packages/importlib_resources/tests/zip.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Generate zip test data files.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import zipfile
|
| 6 |
+
|
| 7 |
+
import zipp
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def make_zip_file(tree, dst):
|
| 11 |
+
"""
|
| 12 |
+
Zip the files in tree into a new zipfile at dst.
|
| 13 |
+
"""
|
| 14 |
+
with zipfile.ZipFile(dst, 'w') as zf:
|
| 15 |
+
for name, contents in walk(tree):
|
| 16 |
+
zf.writestr(name, contents)
|
| 17 |
+
zipp.CompleteDirs.inject(zf)
|
| 18 |
+
return dst
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def walk(tree, prefix=''):
|
| 22 |
+
for name, contents in tree.items():
|
| 23 |
+
if isinstance(contents, dict):
|
| 24 |
+
yield from walk(contents, prefix=f'{prefix}{name}/')
|
| 25 |
+
else:
|
| 26 |
+
yield f'{prefix}{name}', contents
|
lib/python3.10/site-packages/pandas/__init__.py
ADDED
|
@@ -0,0 +1,367 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
__docformat__ = "restructuredtext"
|
| 7 |
+
|
| 8 |
+
# Let users know if they're missing any of our hard dependencies
|
| 9 |
+
_hard_dependencies = ("numpy", "pytz", "dateutil")
|
| 10 |
+
_missing_dependencies = []
|
| 11 |
+
|
| 12 |
+
for _dependency in _hard_dependencies:
|
| 13 |
+
try:
|
| 14 |
+
__import__(_dependency)
|
| 15 |
+
except ImportError as _e: # pragma: no cover
|
| 16 |
+
_missing_dependencies.append(f"{_dependency}: {_e}")
|
| 17 |
+
|
| 18 |
+
if _missing_dependencies: # pragma: no cover
|
| 19 |
+
raise ImportError(
|
| 20 |
+
"Unable to import required dependencies:\n" + "\n".join(_missing_dependencies)
|
| 21 |
+
)
|
| 22 |
+
del _hard_dependencies, _dependency, _missing_dependencies
|
| 23 |
+
|
| 24 |
+
try:
|
| 25 |
+
# numpy compat
|
| 26 |
+
from pandas.compat import (
|
| 27 |
+
is_numpy_dev as _is_numpy_dev, # pyright: ignore[reportUnusedImport] # noqa: F401
|
| 28 |
+
)
|
| 29 |
+
except ImportError as _err: # pragma: no cover
|
| 30 |
+
_module = _err.name
|
| 31 |
+
raise ImportError(
|
| 32 |
+
f"C extension: {_module} not built. If you want to import "
|
| 33 |
+
"pandas from the source directory, you may need to run "
|
| 34 |
+
"'python setup.py build_ext' to build the C extensions first."
|
| 35 |
+
) from _err
|
| 36 |
+
|
| 37 |
+
from pandas._config import (
|
| 38 |
+
get_option,
|
| 39 |
+
set_option,
|
| 40 |
+
reset_option,
|
| 41 |
+
describe_option,
|
| 42 |
+
option_context,
|
| 43 |
+
options,
|
| 44 |
+
)
|
| 45 |
+
|
| 46 |
+
# let init-time option registration happen
|
| 47 |
+
import pandas.core.config_init # pyright: ignore[reportUnusedImport] # noqa: F401
|
| 48 |
+
|
| 49 |
+
from pandas.core.api import (
|
| 50 |
+
# dtype
|
| 51 |
+
ArrowDtype,
|
| 52 |
+
Int8Dtype,
|
| 53 |
+
Int16Dtype,
|
| 54 |
+
Int32Dtype,
|
| 55 |
+
Int64Dtype,
|
| 56 |
+
UInt8Dtype,
|
| 57 |
+
UInt16Dtype,
|
| 58 |
+
UInt32Dtype,
|
| 59 |
+
UInt64Dtype,
|
| 60 |
+
Float32Dtype,
|
| 61 |
+
Float64Dtype,
|
| 62 |
+
CategoricalDtype,
|
| 63 |
+
PeriodDtype,
|
| 64 |
+
IntervalDtype,
|
| 65 |
+
DatetimeTZDtype,
|
| 66 |
+
StringDtype,
|
| 67 |
+
BooleanDtype,
|
| 68 |
+
# missing
|
| 69 |
+
NA,
|
| 70 |
+
isna,
|
| 71 |
+
isnull,
|
| 72 |
+
notna,
|
| 73 |
+
notnull,
|
| 74 |
+
# indexes
|
| 75 |
+
Index,
|
| 76 |
+
CategoricalIndex,
|
| 77 |
+
RangeIndex,
|
| 78 |
+
MultiIndex,
|
| 79 |
+
IntervalIndex,
|
| 80 |
+
TimedeltaIndex,
|
| 81 |
+
DatetimeIndex,
|
| 82 |
+
PeriodIndex,
|
| 83 |
+
IndexSlice,
|
| 84 |
+
# tseries
|
| 85 |
+
NaT,
|
| 86 |
+
Period,
|
| 87 |
+
period_range,
|
| 88 |
+
Timedelta,
|
| 89 |
+
timedelta_range,
|
| 90 |
+
Timestamp,
|
| 91 |
+
date_range,
|
| 92 |
+
bdate_range,
|
| 93 |
+
Interval,
|
| 94 |
+
interval_range,
|
| 95 |
+
DateOffset,
|
| 96 |
+
# conversion
|
| 97 |
+
to_numeric,
|
| 98 |
+
to_datetime,
|
| 99 |
+
to_timedelta,
|
| 100 |
+
# misc
|
| 101 |
+
Flags,
|
| 102 |
+
Grouper,
|
| 103 |
+
factorize,
|
| 104 |
+
unique,
|
| 105 |
+
value_counts,
|
| 106 |
+
NamedAgg,
|
| 107 |
+
array,
|
| 108 |
+
Categorical,
|
| 109 |
+
set_eng_float_format,
|
| 110 |
+
Series,
|
| 111 |
+
DataFrame,
|
| 112 |
+
)
|
| 113 |
+
|
| 114 |
+
from pandas.core.dtypes.dtypes import SparseDtype
|
| 115 |
+
|
| 116 |
+
from pandas.tseries.api import infer_freq
|
| 117 |
+
from pandas.tseries import offsets
|
| 118 |
+
|
| 119 |
+
from pandas.core.computation.api import eval
|
| 120 |
+
|
| 121 |
+
from pandas.core.reshape.api import (
|
| 122 |
+
concat,
|
| 123 |
+
lreshape,
|
| 124 |
+
melt,
|
| 125 |
+
wide_to_long,
|
| 126 |
+
merge,
|
| 127 |
+
merge_asof,
|
| 128 |
+
merge_ordered,
|
| 129 |
+
crosstab,
|
| 130 |
+
pivot,
|
| 131 |
+
pivot_table,
|
| 132 |
+
get_dummies,
|
| 133 |
+
from_dummies,
|
| 134 |
+
cut,
|
| 135 |
+
qcut,
|
| 136 |
+
)
|
| 137 |
+
|
| 138 |
+
from pandas import api, arrays, errors, io, plotting, tseries
|
| 139 |
+
from pandas import testing
|
| 140 |
+
from pandas.util._print_versions import show_versions
|
| 141 |
+
|
| 142 |
+
from pandas.io.api import (
|
| 143 |
+
# excel
|
| 144 |
+
ExcelFile,
|
| 145 |
+
ExcelWriter,
|
| 146 |
+
read_excel,
|
| 147 |
+
# parsers
|
| 148 |
+
read_csv,
|
| 149 |
+
read_fwf,
|
| 150 |
+
read_table,
|
| 151 |
+
# pickle
|
| 152 |
+
read_pickle,
|
| 153 |
+
to_pickle,
|
| 154 |
+
# pytables
|
| 155 |
+
HDFStore,
|
| 156 |
+
read_hdf,
|
| 157 |
+
# sql
|
| 158 |
+
read_sql,
|
| 159 |
+
read_sql_query,
|
| 160 |
+
read_sql_table,
|
| 161 |
+
# misc
|
| 162 |
+
read_clipboard,
|
| 163 |
+
read_parquet,
|
| 164 |
+
read_orc,
|
| 165 |
+
read_feather,
|
| 166 |
+
read_gbq,
|
| 167 |
+
read_html,
|
| 168 |
+
read_xml,
|
| 169 |
+
read_json,
|
| 170 |
+
read_stata,
|
| 171 |
+
read_sas,
|
| 172 |
+
read_spss,
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
from pandas.io.json._normalize import json_normalize
|
| 176 |
+
|
| 177 |
+
from pandas.util._tester import test
|
| 178 |
+
|
| 179 |
+
# use the closest tagged version if possible
|
| 180 |
+
_built_with_meson = False
|
| 181 |
+
try:
|
| 182 |
+
from pandas._version_meson import ( # pyright: ignore [reportMissingImports]
|
| 183 |
+
__version__,
|
| 184 |
+
__git_version__,
|
| 185 |
+
)
|
| 186 |
+
|
| 187 |
+
_built_with_meson = True
|
| 188 |
+
except ImportError:
|
| 189 |
+
from pandas._version import get_versions
|
| 190 |
+
|
| 191 |
+
v = get_versions()
|
| 192 |
+
__version__ = v.get("closest-tag", v["version"])
|
| 193 |
+
__git_version__ = v.get("full-revisionid")
|
| 194 |
+
del get_versions, v
|
| 195 |
+
|
| 196 |
+
# GH#55043 - deprecation of the data_manager option
|
| 197 |
+
if "PANDAS_DATA_MANAGER" in os.environ:
|
| 198 |
+
warnings.warn(
|
| 199 |
+
"The env variable PANDAS_DATA_MANAGER is set. The data_manager option is "
|
| 200 |
+
"deprecated and will be removed in a future version. Only the BlockManager "
|
| 201 |
+
"will be available. Unset this environment variable to silence this warning.",
|
| 202 |
+
FutureWarning,
|
| 203 |
+
stacklevel=2,
|
| 204 |
+
)
|
| 205 |
+
|
| 206 |
+
del warnings, os
|
| 207 |
+
|
| 208 |
+
# module level doc-string
|
| 209 |
+
__doc__ = """
|
| 210 |
+
pandas - a powerful data analysis and manipulation library for Python
|
| 211 |
+
=====================================================================
|
| 212 |
+
|
| 213 |
+
**pandas** is a Python package providing fast, flexible, and expressive data
|
| 214 |
+
structures designed to make working with "relational" or "labeled" data both
|
| 215 |
+
easy and intuitive. It aims to be the fundamental high-level building block for
|
| 216 |
+
doing practical, **real world** data analysis in Python. Additionally, it has
|
| 217 |
+
the broader goal of becoming **the most powerful and flexible open source data
|
| 218 |
+
analysis / manipulation tool available in any language**. It is already well on
|
| 219 |
+
its way toward this goal.
|
| 220 |
+
|
| 221 |
+
Main Features
|
| 222 |
+
-------------
|
| 223 |
+
Here are just a few of the things that pandas does well:
|
| 224 |
+
|
| 225 |
+
- Easy handling of missing data in floating point as well as non-floating
|
| 226 |
+
point data.
|
| 227 |
+
- Size mutability: columns can be inserted and deleted from DataFrame and
|
| 228 |
+
higher dimensional objects
|
| 229 |
+
- Automatic and explicit data alignment: objects can be explicitly aligned
|
| 230 |
+
to a set of labels, or the user can simply ignore the labels and let
|
| 231 |
+
`Series`, `DataFrame`, etc. automatically align the data for you in
|
| 232 |
+
computations.
|
| 233 |
+
- Powerful, flexible group by functionality to perform split-apply-combine
|
| 234 |
+
operations on data sets, for both aggregating and transforming data.
|
| 235 |
+
- Make it easy to convert ragged, differently-indexed data in other Python
|
| 236 |
+
and NumPy data structures into DataFrame objects.
|
| 237 |
+
- Intelligent label-based slicing, fancy indexing, and subsetting of large
|
| 238 |
+
data sets.
|
| 239 |
+
- Intuitive merging and joining data sets.
|
| 240 |
+
- Flexible reshaping and pivoting of data sets.
|
| 241 |
+
- Hierarchical labeling of axes (possible to have multiple labels per tick).
|
| 242 |
+
- Robust IO tools for loading data from flat files (CSV and delimited),
|
| 243 |
+
Excel files, databases, and saving/loading data from the ultrafast HDF5
|
| 244 |
+
format.
|
| 245 |
+
- Time series-specific functionality: date range generation and frequency
|
| 246 |
+
conversion, moving window statistics, date shifting and lagging.
|
| 247 |
+
"""
|
| 248 |
+
|
| 249 |
+
# Use __all__ to let type checkers know what is part of the public API.
|
| 250 |
+
# Pandas is not (yet) a py.typed library: the public API is determined
|
| 251 |
+
# based on the documentation.
|
| 252 |
+
__all__ = [
|
| 253 |
+
"ArrowDtype",
|
| 254 |
+
"BooleanDtype",
|
| 255 |
+
"Categorical",
|
| 256 |
+
"CategoricalDtype",
|
| 257 |
+
"CategoricalIndex",
|
| 258 |
+
"DataFrame",
|
| 259 |
+
"DateOffset",
|
| 260 |
+
"DatetimeIndex",
|
| 261 |
+
"DatetimeTZDtype",
|
| 262 |
+
"ExcelFile",
|
| 263 |
+
"ExcelWriter",
|
| 264 |
+
"Flags",
|
| 265 |
+
"Float32Dtype",
|
| 266 |
+
"Float64Dtype",
|
| 267 |
+
"Grouper",
|
| 268 |
+
"HDFStore",
|
| 269 |
+
"Index",
|
| 270 |
+
"IndexSlice",
|
| 271 |
+
"Int16Dtype",
|
| 272 |
+
"Int32Dtype",
|
| 273 |
+
"Int64Dtype",
|
| 274 |
+
"Int8Dtype",
|
| 275 |
+
"Interval",
|
| 276 |
+
"IntervalDtype",
|
| 277 |
+
"IntervalIndex",
|
| 278 |
+
"MultiIndex",
|
| 279 |
+
"NA",
|
| 280 |
+
"NaT",
|
| 281 |
+
"NamedAgg",
|
| 282 |
+
"Period",
|
| 283 |
+
"PeriodDtype",
|
| 284 |
+
"PeriodIndex",
|
| 285 |
+
"RangeIndex",
|
| 286 |
+
"Series",
|
| 287 |
+
"SparseDtype",
|
| 288 |
+
"StringDtype",
|
| 289 |
+
"Timedelta",
|
| 290 |
+
"TimedeltaIndex",
|
| 291 |
+
"Timestamp",
|
| 292 |
+
"UInt16Dtype",
|
| 293 |
+
"UInt32Dtype",
|
| 294 |
+
"UInt64Dtype",
|
| 295 |
+
"UInt8Dtype",
|
| 296 |
+
"api",
|
| 297 |
+
"array",
|
| 298 |
+
"arrays",
|
| 299 |
+
"bdate_range",
|
| 300 |
+
"concat",
|
| 301 |
+
"crosstab",
|
| 302 |
+
"cut",
|
| 303 |
+
"date_range",
|
| 304 |
+
"describe_option",
|
| 305 |
+
"errors",
|
| 306 |
+
"eval",
|
| 307 |
+
"factorize",
|
| 308 |
+
"get_dummies",
|
| 309 |
+
"from_dummies",
|
| 310 |
+
"get_option",
|
| 311 |
+
"infer_freq",
|
| 312 |
+
"interval_range",
|
| 313 |
+
"io",
|
| 314 |
+
"isna",
|
| 315 |
+
"isnull",
|
| 316 |
+
"json_normalize",
|
| 317 |
+
"lreshape",
|
| 318 |
+
"melt",
|
| 319 |
+
"merge",
|
| 320 |
+
"merge_asof",
|
| 321 |
+
"merge_ordered",
|
| 322 |
+
"notna",
|
| 323 |
+
"notnull",
|
| 324 |
+
"offsets",
|
| 325 |
+
"option_context",
|
| 326 |
+
"options",
|
| 327 |
+
"period_range",
|
| 328 |
+
"pivot",
|
| 329 |
+
"pivot_table",
|
| 330 |
+
"plotting",
|
| 331 |
+
"qcut",
|
| 332 |
+
"read_clipboard",
|
| 333 |
+
"read_csv",
|
| 334 |
+
"read_excel",
|
| 335 |
+
"read_feather",
|
| 336 |
+
"read_fwf",
|
| 337 |
+
"read_gbq",
|
| 338 |
+
"read_hdf",
|
| 339 |
+
"read_html",
|
| 340 |
+
"read_json",
|
| 341 |
+
"read_orc",
|
| 342 |
+
"read_parquet",
|
| 343 |
+
"read_pickle",
|
| 344 |
+
"read_sas",
|
| 345 |
+
"read_spss",
|
| 346 |
+
"read_sql",
|
| 347 |
+
"read_sql_query",
|
| 348 |
+
"read_sql_table",
|
| 349 |
+
"read_stata",
|
| 350 |
+
"read_table",
|
| 351 |
+
"read_xml",
|
| 352 |
+
"reset_option",
|
| 353 |
+
"set_eng_float_format",
|
| 354 |
+
"set_option",
|
| 355 |
+
"show_versions",
|
| 356 |
+
"test",
|
| 357 |
+
"testing",
|
| 358 |
+
"timedelta_range",
|
| 359 |
+
"to_datetime",
|
| 360 |
+
"to_numeric",
|
| 361 |
+
"to_pickle",
|
| 362 |
+
"to_timedelta",
|
| 363 |
+
"tseries",
|
| 364 |
+
"unique",
|
| 365 |
+
"value_counts",
|
| 366 |
+
"wide_to_long",
|
| 367 |
+
]
|
lib/python3.10/site-packages/pandas/_config/__init__.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pandas._config is considered explicitly upstream of everything else in pandas,
|
| 3 |
+
should have no intra-pandas dependencies.
|
| 4 |
+
|
| 5 |
+
importing `dates` and `display` ensures that keys needed by _libs
|
| 6 |
+
are initialized.
|
| 7 |
+
"""
|
| 8 |
+
__all__ = [
|
| 9 |
+
"config",
|
| 10 |
+
"detect_console_encoding",
|
| 11 |
+
"get_option",
|
| 12 |
+
"set_option",
|
| 13 |
+
"reset_option",
|
| 14 |
+
"describe_option",
|
| 15 |
+
"option_context",
|
| 16 |
+
"options",
|
| 17 |
+
"using_copy_on_write",
|
| 18 |
+
"warn_copy_on_write",
|
| 19 |
+
]
|
| 20 |
+
from pandas._config import config
|
| 21 |
+
from pandas._config import dates # pyright: ignore[reportUnusedImport] # noqa: F401
|
| 22 |
+
from pandas._config.config import (
|
| 23 |
+
_global_config,
|
| 24 |
+
describe_option,
|
| 25 |
+
get_option,
|
| 26 |
+
option_context,
|
| 27 |
+
options,
|
| 28 |
+
reset_option,
|
| 29 |
+
set_option,
|
| 30 |
+
)
|
| 31 |
+
from pandas._config.display import detect_console_encoding
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def using_copy_on_write() -> bool:
    """Return True when Copy-on-Write mode is fully enabled.

    CoW counts as enabled only when the option is literally ``True``
    (not the "warn" string) and the block manager is in use.
    """
    mode = _global_config["mode"]
    cow_enabled = mode["copy_on_write"] is True
    uses_block_manager = mode["data_manager"] == "block"
    return cow_enabled and uses_block_manager
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def warn_copy_on_write() -> bool:
    """Return True when Copy-on-Write is in warning ("warn") mode.

    Warning mode only applies while the block manager is in use.
    """
    mode = _global_config["mode"]
    if mode["copy_on_write"] != "warn":
        return False
    return mode["data_manager"] == "block"
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def using_nullable_dtypes() -> bool:
    """Return the current value of the ``mode.nullable_dtypes`` option."""
    return _global_config["mode"]["nullable_dtypes"]
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def using_pyarrow_string_dtype() -> bool:
    """Return the current value of the ``future.infer_string`` option."""
    return _global_config["future"]["infer_string"]
|
lib/python3.10/site-packages/pandas/_config/config.py
ADDED
|
@@ -0,0 +1,948 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The config module holds package-wide configurables and provides
|
| 3 |
+
a uniform API for working with them.
|
| 4 |
+
|
| 5 |
+
Overview
|
| 6 |
+
========
|
| 7 |
+
|
| 8 |
+
This module supports the following requirements:
|
| 9 |
+
- options are referenced using keys in dot.notation, e.g. "x.y.option - z".
|
| 10 |
+
- keys are case-insensitive.
|
| 11 |
+
- functions should accept partial/regex keys, when unambiguous.
|
| 12 |
+
- options can be registered by modules at import time.
|
| 13 |
+
- options can be registered at init-time (via core.config_init)
|
| 14 |
+
- options have a default value, and (optionally) a description and
|
| 15 |
+
validation function associated with them.
|
| 16 |
+
- options can be deprecated, in which case referencing them
|
| 17 |
+
should produce a warning.
|
| 18 |
+
- deprecated options can optionally be rerouted to a replacement
|
| 19 |
+
so that accessing a deprecated option reroutes to a differently
|
| 20 |
+
named option.
|
| 21 |
+
- options can be reset to their default value.
|
| 22 |
+
- all option can be reset to their default value at once.
|
| 23 |
+
- all options in a certain sub - namespace can be reset at once.
|
| 24 |
+
- the user can set / get / reset or ask for the description of an option.
|
| 25 |
+
- a developer can register and mark an option as deprecated.
|
| 26 |
+
- you can register a callback to be invoked when the option value
|
| 27 |
+
is set or reset. Changing the stored value is considered misuse, but
|
| 28 |
+
is not verboten.
|
| 29 |
+
|
| 30 |
+
Implementation
|
| 31 |
+
==============
|
| 32 |
+
|
| 33 |
+
- Data is stored using nested dictionaries, and should be accessed
|
| 34 |
+
through the provided API.
|
| 35 |
+
|
| 36 |
+
- "Registered options" and "Deprecated options" have metadata associated
|
| 37 |
+
with them, which are stored in auxiliary dictionaries keyed on the
|
| 38 |
+
fully-qualified key, e.g. "x.y.z.option".
|
| 39 |
+
|
| 40 |
+
- the config_init module is imported by the package's __init__.py file.
|
| 41 |
+
placing any register_option() calls there will ensure those options
|
| 42 |
+
are available as soon as pandas is loaded. If you use register_option
|
| 43 |
+
in a module, it will only be available after that module is imported,
|
| 44 |
+
which you should be aware of.
|
| 45 |
+
|
| 46 |
+
- `config_prefix` is a context_manager (for use with the `with` keyword)
|
| 47 |
+
which can save developers some typing, see the docstring.
|
| 48 |
+
|
| 49 |
+
"""
|
| 50 |
+
|
| 51 |
+
from __future__ import annotations
|
| 52 |
+
|
| 53 |
+
from contextlib import (
|
| 54 |
+
ContextDecorator,
|
| 55 |
+
contextmanager,
|
| 56 |
+
)
|
| 57 |
+
import re
|
| 58 |
+
from typing import (
|
| 59 |
+
TYPE_CHECKING,
|
| 60 |
+
Any,
|
| 61 |
+
Callable,
|
| 62 |
+
Generic,
|
| 63 |
+
NamedTuple,
|
| 64 |
+
cast,
|
| 65 |
+
)
|
| 66 |
+
import warnings
|
| 67 |
+
|
| 68 |
+
from pandas._typing import (
|
| 69 |
+
F,
|
| 70 |
+
T,
|
| 71 |
+
)
|
| 72 |
+
from pandas.util._exceptions import find_stack_level
|
| 73 |
+
|
| 74 |
+
if TYPE_CHECKING:
|
| 75 |
+
from collections.abc import (
|
| 76 |
+
Generator,
|
| 77 |
+
Iterable,
|
| 78 |
+
)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class DeprecatedOption(NamedTuple):
    """Metadata describing a deprecated option.

    key is the fully-qualified option name being deprecated; msg is a
    custom warning message (a default is built when None); rkey is the
    replacement key accesses are re-routed to, if any; removal_ver names
    the version in which the option will be removed, if known.
    """

    key: str
    msg: str | None
    rkey: str | None
    removal_ver: str | None
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class RegisteredOption(NamedTuple):
    """Metadata for a registered option.

    key is the fully-qualified, lower-cased option name; defval its
    default value; doc a human-readable description; validator a callable
    that raises for illegal values (or None); cb a callback invoked with
    the key after each set/reset (or None).
    """

    key: str
    defval: object
    doc: str
    validator: Callable[[object], Any] | None
    cb: Callable[[str], Any] | None
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# holds deprecated option metadata, keyed on the fully-qualified option name
_deprecated_options: dict[str, DeprecatedOption] = {}

# holds registered option metadata, keyed on the fully-qualified option name
_registered_options: dict[str, RegisteredOption] = {}

# holds the current values for registered options, as a nested dict
# (one level per dotted path component)
_global_config: dict[str, Any] = {}

# keys which have a special meaning and therefore cannot be option names
_reserved_keys: list[str] = ["all"]
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
# Subclasses both AttributeError and KeyError because options are reachable
# via attribute access (pd.options.x) as well as key-style lookups.
class OptionError(AttributeError, KeyError):
    """
    Exception raised for pandas.options.

    Backwards compatible with KeyError checks.

    Examples
    --------
    >>> pd.options.context
    Traceback (most recent call last):
    OptionError: No such option
    """
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
#
|
| 124 |
+
# User API
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def _get_single_key(pat: str, silent: bool) -> str:
    """Resolve ``pat`` to exactly one registered key or raise OptionError.

    Unless ``silent``, a deprecation warning is emitted for deprecated
    keys.  The returned key is translated to its replacement key when a
    re-routing has been defined.
    """
    matches = _select_options(pat)
    if not matches:
        if not silent:
            _warn_if_deprecated(pat)
        raise OptionError(f"No such keys(s): {repr(pat)}")
    if len(matches) > 1:
        raise OptionError("Pattern matched multiple keys")

    key = matches[0]
    if not silent:
        _warn_if_deprecated(key)
    return _translate_key(key)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _get_option(pat: str, silent: bool = False) -> Any:
    """Return the current value of the single option matching ``pat``."""
    resolved = _get_single_key(pat, silent)
    # descend the nested config dict to the leaf holding the value
    parent, leaf = _get_root(resolved)
    return parent[leaf]
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def _set_option(*args, **kwargs) -> None:
    """Set one or more options given alternating key/value positional args.

    Accepts only the ``silent`` keyword; when True, deprecation warnings
    are suppressed and any callback runs with warnings captured.
    """
    # args must be non-empty pairs of (pattern, value)
    nargs = len(args)
    if not nargs or nargs % 2 != 0:
        raise ValueError("Must provide an even number of non-keyword arguments")

    # default to false
    silent = kwargs.pop("silent", False)

    # any remaining keyword is unsupported -- report the first one
    if kwargs:
        kwarg = next(iter(kwargs.keys()))
        raise TypeError(f'_set_option() got an unexpected keyword argument "{kwarg}"')

    for k, v in zip(args[::2], args[1::2]):
        key = _get_single_key(k, silent)

        o = _get_registered_option(key)
        # validate before mutating so an illegal value leaves config untouched
        if o and o.validator:
            o.validator(v)

        # walk the nested dict
        root, k_root = _get_root(key)
        root[k_root] = v

        # fire the option's callback after the value is stored
        if o.cb:
            if silent:
                with warnings.catch_warnings(record=True):
                    o.cb(key)
            else:
                o.cb(key)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _describe_option(pat: str = "", _print_desc: bool = True) -> str | None:
    """Print (or return, when ``_print_desc`` is False) descriptions of
    every registered option matching ``pat``."""
    matched = _select_options(pat)
    if not matched:
        raise OptionError("No such keys(s)")

    description = "\n".join(_build_option_description(k) for k in matched)
    if not _print_desc:
        return description
    print(description)
    return None
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def _reset_option(pat: str, silent: bool = False) -> None:
    """Reset every option matching ``pat`` to its registered default.

    Short patterns matching several keys are rejected to guard against
    accidental mass resets; the reserved keyword "all" is the explicit
    way to reset everything.
    """
    matched = _select_options(pat)
    if not matched:
        raise OptionError("No such keys(s)")

    if len(matched) > 1 and len(pat) < 4 and pat != "all":
        raise ValueError(
            "You must specify at least 4 characters when "
            "resetting multiple keys, use the special keyword "
            '"all" to reset all the options to their default value'
        )

    for key in matched:
        _set_option(key, _registered_options[key].defval, silent=silent)
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def get_default_val(pat: str):
    """Return the registered default value of the single option matching ``pat``."""
    key = _get_single_key(pat, silent=True)
    return _get_registered_option(key).defval
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class DictWrapper:
    """provide attribute-style access to a nested dict"""

    # the wrapped (sub-)dict of the global config
    d: dict[str, Any]

    def __init__(self, d: dict[str, Any], prefix: str = "") -> None:
        # use object.__setattr__ to bypass this class's own __setattr__,
        # which would otherwise treat these assignments as option writes
        object.__setattr__(self, "d", d)
        object.__setattr__(self, "prefix", prefix)

    def __setattr__(self, key: str, val: Any) -> None:
        # rebuild the fully-qualified dotted option name from the prefix
        prefix = object.__getattribute__(self, "prefix")
        if prefix:
            prefix += "."
        prefix += key
        # you can't set new keys,
        # nor can you overwrite subtrees (dict-valued entries)
        if key in self.d and not isinstance(self.d[key], dict):
            _set_option(prefix, val)
        else:
            raise OptionError("You can only set the value of existing options")

    def __getattr__(self, key: str):
        prefix = object.__getattribute__(self, "prefix")
        if prefix:
            prefix += "."
        prefix += key
        try:
            v = object.__getattribute__(self, "d")[key]
        except KeyError as err:
            raise OptionError("No such option") from err
        if isinstance(v, dict):
            # intermediate node: hand back a wrapper so chained attribute
            # access (options.display.max_rows) keeps working
            return DictWrapper(v, prefix)
        else:
            # leaf node: go through _get_option so deprecation handling applies
            return _get_option(prefix)

    def __dir__(self) -> list[str]:
        # expose the option names at this level for tab completion
        return list(self.d.keys())
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
# For user convenience, we'd like to have the available options described
|
| 260 |
+
# in the docstring. For dev convenience we'd like to generate the docstrings
|
| 261 |
+
# dynamically instead of maintaining them by hand. To this, we use the
|
| 262 |
+
# class below which wraps functions inside a callable, and converts
|
| 263 |
+
# __doc__ into a property function. The doctsrings below are templates
|
| 264 |
+
# using the py2.6+ advanced formatting syntax to plug in a concise list
|
| 265 |
+
# of options, and option descriptions.
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
class CallableDynamicDoc(Generic[T]):
    """Wrap ``func`` in a callable whose ``__doc__`` is computed on access,
    so the docstring always reflects the currently registered options."""

    def __init__(self, func: Callable[..., T], doc_tmpl: str) -> None:
        self.__doc_tmpl__ = doc_tmpl
        self.__func__ = func

    def __call__(self, *args, **kwds) -> T:
        # delegate straight through to the wrapped function
        return self.__func__(*args, **kwds)

    # error: Signature of "__doc__" incompatible with supertype "object"
    @property
    def __doc__(self) -> str:  # type: ignore[override]
        # fill the template with the live option list and descriptions
        opts_desc = _describe_option("all", _print_desc=False)
        opts_list = pp_options_list(list(_registered_options.keys()))
        return self.__doc_tmpl__.format(opts_desc=opts_desc, opts_list=opts_list)
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
_get_option_tmpl = """
|
| 285 |
+
get_option(pat)
|
| 286 |
+
|
| 287 |
+
Retrieves the value of the specified option.
|
| 288 |
+
|
| 289 |
+
Available options:
|
| 290 |
+
|
| 291 |
+
{opts_list}
|
| 292 |
+
|
| 293 |
+
Parameters
|
| 294 |
+
----------
|
| 295 |
+
pat : str
|
| 296 |
+
Regexp which should match a single option.
|
| 297 |
+
Note: partial matches are supported for convenience, but unless you use the
|
| 298 |
+
full option name (e.g. x.y.z.option_name), your code may break in future
|
| 299 |
+
versions if new options with similar names are introduced.
|
| 300 |
+
|
| 301 |
+
Returns
|
| 302 |
+
-------
|
| 303 |
+
result : the value of the option
|
| 304 |
+
|
| 305 |
+
Raises
|
| 306 |
+
------
|
| 307 |
+
OptionError : if no such option exists
|
| 308 |
+
|
| 309 |
+
Notes
|
| 310 |
+
-----
|
| 311 |
+
Please reference the :ref:`User Guide <options>` for more information.
|
| 312 |
+
|
| 313 |
+
The available options with its descriptions:
|
| 314 |
+
|
| 315 |
+
{opts_desc}
|
| 316 |
+
|
| 317 |
+
Examples
|
| 318 |
+
--------
|
| 319 |
+
>>> pd.get_option('display.max_columns') # doctest: +SKIP
|
| 320 |
+
4
|
| 321 |
+
"""
|
| 322 |
+
|
| 323 |
+
_set_option_tmpl = """
|
| 324 |
+
set_option(pat, value)
|
| 325 |
+
|
| 326 |
+
Sets the value of the specified option.
|
| 327 |
+
|
| 328 |
+
Available options:
|
| 329 |
+
|
| 330 |
+
{opts_list}
|
| 331 |
+
|
| 332 |
+
Parameters
|
| 333 |
+
----------
|
| 334 |
+
pat : str
|
| 335 |
+
Regexp which should match a single option.
|
| 336 |
+
Note: partial matches are supported for convenience, but unless you use the
|
| 337 |
+
full option name (e.g. x.y.z.option_name), your code may break in future
|
| 338 |
+
versions if new options with similar names are introduced.
|
| 339 |
+
value : object
|
| 340 |
+
New value of option.
|
| 341 |
+
|
| 342 |
+
Returns
|
| 343 |
+
-------
|
| 344 |
+
None
|
| 345 |
+
|
| 346 |
+
Raises
|
| 347 |
+
------
|
| 348 |
+
OptionError if no such option exists
|
| 349 |
+
|
| 350 |
+
Notes
|
| 351 |
+
-----
|
| 352 |
+
Please reference the :ref:`User Guide <options>` for more information.
|
| 353 |
+
|
| 354 |
+
The available options with its descriptions:
|
| 355 |
+
|
| 356 |
+
{opts_desc}
|
| 357 |
+
|
| 358 |
+
Examples
|
| 359 |
+
--------
|
| 360 |
+
>>> pd.set_option('display.max_columns', 4)
|
| 361 |
+
>>> df = pd.DataFrame([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]])
|
| 362 |
+
>>> df
|
| 363 |
+
0 1 ... 3 4
|
| 364 |
+
0 1 2 ... 4 5
|
| 365 |
+
1 6 7 ... 9 10
|
| 366 |
+
[2 rows x 5 columns]
|
| 367 |
+
>>> pd.reset_option('display.max_columns')
|
| 368 |
+
"""
|
| 369 |
+
|
| 370 |
+
_describe_option_tmpl = """
|
| 371 |
+
describe_option(pat, _print_desc=False)
|
| 372 |
+
|
| 373 |
+
Prints the description for one or more registered options.
|
| 374 |
+
|
| 375 |
+
Call with no arguments to get a listing for all registered options.
|
| 376 |
+
|
| 377 |
+
Available options:
|
| 378 |
+
|
| 379 |
+
{opts_list}
|
| 380 |
+
|
| 381 |
+
Parameters
|
| 382 |
+
----------
|
| 383 |
+
pat : str
|
| 384 |
+
Regexp pattern. All matching keys will have their description displayed.
|
| 385 |
+
_print_desc : bool, default True
|
| 386 |
+
If True (default) the description(s) will be printed to stdout.
|
| 387 |
+
Otherwise, the description(s) will be returned as a unicode string
|
| 388 |
+
(for testing).
|
| 389 |
+
|
| 390 |
+
Returns
|
| 391 |
+
-------
|
| 392 |
+
None by default, the description(s) as a unicode string if _print_desc
|
| 393 |
+
is False
|
| 394 |
+
|
| 395 |
+
Notes
|
| 396 |
+
-----
|
| 397 |
+
Please reference the :ref:`User Guide <options>` for more information.
|
| 398 |
+
|
| 399 |
+
The available options with its descriptions:
|
| 400 |
+
|
| 401 |
+
{opts_desc}
|
| 402 |
+
|
| 403 |
+
Examples
|
| 404 |
+
--------
|
| 405 |
+
>>> pd.describe_option('display.max_columns') # doctest: +SKIP
|
| 406 |
+
display.max_columns : int
|
| 407 |
+
If max_cols is exceeded, switch to truncate view...
|
| 408 |
+
"""
|
| 409 |
+
|
| 410 |
+
_reset_option_tmpl = """
|
| 411 |
+
reset_option(pat)
|
| 412 |
+
|
| 413 |
+
Reset one or more options to their default value.
|
| 414 |
+
|
| 415 |
+
Pass "all" as argument to reset all options.
|
| 416 |
+
|
| 417 |
+
Available options:
|
| 418 |
+
|
| 419 |
+
{opts_list}
|
| 420 |
+
|
| 421 |
+
Parameters
|
| 422 |
+
----------
|
| 423 |
+
pat : str/regex
|
| 424 |
+
If specified only options matching `prefix*` will be reset.
|
| 425 |
+
Note: partial matches are supported for convenience, but unless you
|
| 426 |
+
use the full option name (e.g. x.y.z.option_name), your code may break
|
| 427 |
+
in future versions if new options with similar names are introduced.
|
| 428 |
+
|
| 429 |
+
Returns
|
| 430 |
+
-------
|
| 431 |
+
None
|
| 432 |
+
|
| 433 |
+
Notes
|
| 434 |
+
-----
|
| 435 |
+
Please reference the :ref:`User Guide <options>` for more information.
|
| 436 |
+
|
| 437 |
+
The available options with its descriptions:
|
| 438 |
+
|
| 439 |
+
{opts_desc}
|
| 440 |
+
|
| 441 |
+
Examples
|
| 442 |
+
--------
|
| 443 |
+
>>> pd.reset_option('display.max_columns') # doctest: +SKIP
|
| 444 |
+
"""
|
| 445 |
+
|
| 446 |
+
# bind the functions with their docstrings into a Callable
# and use that as the functions exposed in pd.api
get_option = CallableDynamicDoc(_get_option, _get_option_tmpl)
set_option = CallableDynamicDoc(_set_option, _set_option_tmpl)
reset_option = CallableDynamicDoc(_reset_option, _reset_option_tmpl)
describe_option = CallableDynamicDoc(_describe_option, _describe_option_tmpl)
# attribute-style access point for all options (pd.options.<...>)
options = DictWrapper(_global_config)
|
| 453 |
+
|
| 454 |
+
#
|
| 455 |
+
# Functions for use by pandas developers, in addition to User - api
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
class option_context(ContextDecorator):
    """
    Context manager to temporarily set options in the `with` statement context.

    You need to invoke as ``option_context(pat, val, [(pat, val), ...])``.

    Examples
    --------
    >>> from pandas import option_context
    >>> with option_context('display.max_rows', 10, 'display.max_columns', 5):
    ...     pass
    """

    def __init__(self, *args) -> None:
        # args must be a flat, non-empty sequence of (pattern, value) pairs
        if len(args) % 2 != 0 or len(args) < 2:
            raise ValueError(
                "Need to invoke as option_context(pat, val, [(pat, val), ...])."
            )

        self.ops = list(zip(args[::2], args[1::2]))

    def __enter__(self) -> None:
        # snapshot current values before overwriting so __exit__ can restore
        self.undo = [(pat, _get_option(pat)) for pat, val in self.ops]

        for pat, val in self.ops:
            _set_option(pat, val, silent=True)

    def __exit__(self, *args) -> None:
        # restore the snapshotted values (silently, to avoid double warnings)
        if self.undo:
            for pat, val in self.undo:
                _set_option(pat, val, silent=True)
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
def register_option(
    key: str,
    defval: object,
    doc: str = "",
    validator: Callable[[object], Any] | None = None,
    cb: Callable[[str], Any] | None = None,
) -> None:
    """
    Register an option in the package-wide pandas config object

    Parameters
    ----------
    key : str
        Fully-qualified key, e.g. "x.y.option - z".
    defval : object
        Default value of the option.
    doc : str
        Description of the option.
    validator : Callable, optional
        Function of a single argument, should raise `ValueError` if
        called with a value which is not a legal value for the option.
    cb
        a function of a single argument "key", which is called
        immediately after an option value is set/reset. key is
        the full name of the option.

    Raises
    ------
    ValueError if `validator` is specified and `defval` is not a valid value.

    """
    # local imports keep module import time down; only needed here
    import keyword
    import tokenize

    # keys are case-insensitive; canonicalize to lower case
    key = key.lower()

    if key in _registered_options:
        raise OptionError(f"Option '{key}' has already been registered")
    if key in _reserved_keys:
        raise OptionError(f"Option '{key}' is a reserved key")

    # the default value should be legal
    if validator:
        validator(defval)

    # walk the nested dict, creating dicts as needed along the path
    path = key.split(".")

    # every path component must be a valid, non-keyword Python identifier
    # so that attribute-style access (pd.options.x.y) works
    for k in path:
        if not re.match("^" + tokenize.Name + "$", k):
            raise ValueError(f"{k} is not a valid identifier")
        if keyword.iskeyword(k):
            raise ValueError(f"{k} is a python keyword")

    cursor = _global_config
    msg = "Path prefix to option '{option}' is already an option"

    for i, p in enumerate(path[:-1]):
        # a non-dict at an intermediate node means a leaf option already
        # occupies this prefix
        if not isinstance(cursor, dict):
            raise OptionError(msg.format(option=".".join(path[:i])))
        if p not in cursor:
            cursor[p] = {}
        cursor = cursor[p]

    if not isinstance(cursor, dict):
        raise OptionError(msg.format(option=".".join(path[:-1])))

    cursor[path[-1]] = defval  # initialize

    # save the option metadata
    _registered_options[key] = RegisteredOption(
        key=key, defval=defval, doc=doc, validator=validator, cb=cb
    )
|
| 564 |
+
|
| 565 |
+
|
| 566 |
+
def deprecate_option(
    key: str,
    msg: str | None = None,
    rkey: str | None = None,
    removal_ver: str | None = None,
) -> None:
    """
    Mark option `key` as deprecated, if code attempts to access this option,
    a warning will be produced, using `msg` if given, or a default message
    if not.
    if `rkey` is given, any access to the key will be re-routed to `rkey`.

    Neither the existence of `key` nor that if `rkey` is checked. If they
    do not exist, any subsequence access will fail as usual, after the
    deprecation warning is given.

    Parameters
    ----------
    key : str
        Name of the option to be deprecated.
        must be a fully-qualified option name (e.g "x.y.z.rkey").
    msg : str, optional
        Warning message to output when the key is referenced.
        if no message is given a default message will be emitted.
    rkey : str, optional
        Name of an option to reroute access to.
        If specified, any referenced `key` will be
        re-routed to `rkey` including set/get/reset.
        rkey must be a fully-qualified option name (e.g "x.y.z.rkey").
        used by the default message if no `msg` is specified.
    removal_ver : str, optional
        Specifies the version in which this option will
        be removed. used by the default message if no `msg` is specified.

    Raises
    ------
    OptionError
        If the specified key has already been deprecated.
    """
    # keys are case-insensitive; canonicalize to lower case
    key = key.lower()

    if key in _deprecated_options:
        raise OptionError(f"Option '{key}' has already been defined as deprecated.")

    _deprecated_options[key] = DeprecatedOption(key, msg, rkey, removal_ver)
|
| 611 |
+
|
| 612 |
+
|
| 613 |
+
#
|
| 614 |
+
# functions internal to the module
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
def _select_options(pat: str) -> list[str]:
    """Return the registered keys matching ``pat``.

    An exact key match short-circuits; the reserved pattern "all" returns
    every registered key; otherwise ``pat`` is searched against each key
    as a case-insensitive regex.
    """
    if pat in _registered_options:
        return [pat]

    all_keys = sorted(_registered_options)
    if pat == "all":  # reserved key
        return all_keys
    return [key for key in all_keys if re.search(pat, key, re.I)]
|
| 633 |
+
|
| 634 |
+
|
| 635 |
+
def _get_root(key: str) -> tuple[dict[str, Any], str]:
    """Walk the nested config dict for dotted ``key``; return the
    innermost containing dict and the final path component."""
    *parents, leaf = key.split(".")
    cursor = _global_config
    for part in parents:
        cursor = cursor[part]
    return cursor, leaf
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
def _is_deprecated(key: str) -> bool:
    """Returns True if the given option has been deprecated"""
    return key.lower() in _deprecated_options
|
| 647 |
+
|
| 648 |
+
|
| 649 |
+
def _get_deprecated_option(key: str):
    """
    Retrieves the metadata for a deprecated option, if `key` is deprecated.

    Returns
    -------
    DeprecatedOption (namedtuple) if key is deprecated, None otherwise
    """
    # dict.get returns None for missing keys, which is exactly the
    # contract here; the previous try/except KeyError/else was an
    # equivalent but noisier spelling (and matches the sibling
    # _get_registered_option, which already uses .get).
    return _deprecated_options.get(key)
|
| 663 |
+
|
| 664 |
+
|
| 665 |
+
def _get_registered_option(key: str):
    """
    Retrieves the option metadata if `key` is a registered option.

    Returns
    -------
    RegisteredOption (namedtuple) if key is a registered option, None otherwise
    """
    # Docstring previously said "if key is deprecated" — this function looks
    # up the *registered* options table, not the deprecation table.
    return _registered_options.get(key)
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
def _translate_key(key: str) -> str:
    """
    If `key` is deprecated and a replacement key is defined, return the
    replacement key; otherwise return `key` as-is.
    """
    # (Docstring previously read "if key id deprecated ... as - is".)
    d = _get_deprecated_option(key)
    if d:
        # rkey may be None/empty for options deprecated without replacement.
        return d.rkey or key
    else:
        return key
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
def _warn_if_deprecated(key: str) -> bool:
    """
    Check whether `key` is a deprecated option and, if so, emit a
    FutureWarning describing the deprecation.

    Returns
    -------
    bool - True if `key` is deprecated, False otherwise.
    """
    d = _get_deprecated_option(key)
    if d is None:
        return False

    if d.msg:
        # A custom deprecation message was supplied at registration time.
        warnings.warn(
            d.msg,
            FutureWarning,
            stacklevel=find_stack_level(),
        )
        return True

    # No custom message: compose a standard one from the recorded metadata.
    msg = f"'{key}' is deprecated"
    if d.removal_ver:
        msg += f" and will be removed in {d.removal_ver}"
    if d.rkey:
        msg += f", please use '{d.rkey}' instead."
    else:
        msg += ", please refrain from using it."
    warnings.warn(msg, FutureWarning, stacklevel=find_stack_level())
    return True
|
| 716 |
+
|
| 717 |
+
|
| 718 |
+
def _build_option_description(k: str) -> str:
    """
    Build a formatted, human-readable description of option `k`,
    including its doc text, default/current values, and any
    deprecation notice.
    """
    o = _get_registered_option(k)
    d = _get_deprecated_option(k)

    s = f"{k} "

    # BUG FIX: `o` is None for unregistered keys; the original code
    # dereferenced `o.doc` unconditionally (before the `if o:` check below),
    # raising AttributeError instead of reporting "No description available."
    if o and o.doc:
        s += "\n".join(o.doc.strip().split("\n"))
    else:
        s += "No description available."

    if o:
        # _get_option(k, True): second argument silences deprecation warnings
        # while fetching the current value — presumably `silent`; confirm
        # against the definition elsewhere in this module.
        s += f"\n    [default: {o.defval}] [currently: {_get_option(k, True)}]"

    if d:
        rkey = d.rkey or ""
        s += "\n    (Deprecated"
        s += f", use `{rkey}` instead."
        s += ")"

    return s
|
| 740 |
+
|
| 741 |
+
|
| 742 |
+
def pp_options_list(keys: Iterable[str], width: int = 80, _print: bool = False):
    """Builds a concise listing of available options, grouped by prefix"""
    from itertools import groupby
    from textwrap import wrap

    def fmt_group(name: str, members: Iterable[str]) -> list[str]:
        # Non-empty group names render as "- name.[member, member, ...]".
        prefix = f"- {name}.[" if name else ""
        wrapped = wrap(
            ", ".join(members),
            width,
            initial_indent=prefix,
            subsequent_indent="  ",
            break_long_words=False,
        )
        if wrapped and wrapped[-1] and name:
            wrapped[-1] += "]"
        return wrapped

    lines: list[str] = []
    # Keys without a dot are listed first, without a group prefix.
    simple = [k for k in sorted(keys) if "." not in k]
    if simple:
        lines.extend(fmt_group("", simple))
    dotted = [k for k in keys if "." in k]

    # Group the remaining keys by everything before the last dot.
    for prefix, group in groupby(sorted(dotted), lambda k: k[: k.rfind(".")]):
        suffixes = [k[len(prefix) + 1 :] for k in group]
        lines.extend(fmt_group(prefix, suffixes))
    listing = "\n".join(lines)
    if _print:
        print(listing)
    else:
        return listing
|
| 774 |
+
|
| 775 |
+
|
| 776 |
+
#
|
| 777 |
+
# helpers
|
| 778 |
+
|
| 779 |
+
|
| 780 |
+
@contextmanager
def config_prefix(prefix: str) -> Generator[None, None, None]:
    """
    Context manager for multiple invocations of the config API with a
    common key prefix.

    Within the block, the module-level functions ``register_option``,
    ``get_option`` and ``set_option`` are temporarily replaced by wrappers
    that prepend ``prefix + "."`` to the key argument; they are restored
    on exit.

    Warning: this is not thread-safe, and it won't work if you imported
    the API functions into your own module with ``from x import y`` —
    only the names in *this* module are patched.

    Example
    -------
    import pandas._config.config as cf
    with cf.config_prefix("display.font"):
        cf.register_option("color", "red")
        cf.register_option("size", " 5 pt")
        cf.set_option(size, " 6 pt")
        cf.get_option(size)
        ...

    will register options "display.font.color", "display.font.size", set the
    value of "display.font.size"... and so on.
    """
    # Note: reset_option relies on set_option, and on key directly
    # it does not fit in to this monkey-patching scheme

    # Rebind the module-level API names for the duration of the block.
    global register_option, get_option, set_option

    def wrap(func: F) -> F:
        # Wrap `func` so every call sees the prefixed key.
        def inner(key: str, *args, **kwds):
            pkey = f"{prefix}.{key}"
            return func(pkey, *args, **kwds)

        return cast(F, inner)

    # Save the originals so they can be restored in `finally`.
    _register_option = register_option
    _get_option = get_option
    _set_option = set_option
    set_option = wrap(set_option)
    get_option = wrap(get_option)
    register_option = wrap(register_option)
    try:
        yield
    finally:
        # Always restore, even if the body raised.
        set_option = _set_option
        get_option = _get_option
        register_option = _register_option
|
| 829 |
+
|
| 830 |
+
|
| 831 |
+
# These factories and methods are handy for use as the validator
|
| 832 |
+
# arg in register_option
|
| 833 |
+
|
| 834 |
+
|
| 835 |
+
def is_type_factory(_type: type[Any]) -> Callable[[Any], None]:
    """
    Build a validator enforcing an exact type match.

    Parameters
    ----------
    `_type` - a type to be compared against (e.g. type(x) == `_type`)

    Returns
    -------
    validator - a function of a single argument x , which raises
        ValueError if type(x) is not equal to `_type`

    """

    def validator(x) -> None:
        # Exact type comparison on purpose (not isinstance): subclasses such
        # as bool-for-int must NOT validate.
        if type(x) != _type:
            raise ValueError(f"Value must have type '{_type}'")

    return validator
|
| 854 |
+
|
| 855 |
+
|
| 856 |
+
def is_instance_factory(_type) -> Callable[[Any], None]:
    """
    Build a validator that checks values with ``isinstance``.

    Parameters
    ----------
    `_type` - the type to be checked against

    Returns
    -------
    validator - a function of a single argument x , which raises
        ValueError if x is not an instance of `_type`

    """
    # A list/tuple of types becomes a tuple (the form isinstance accepts),
    # and the error message joins the alternatives with "|".
    if isinstance(_type, (tuple, list)):
        _type = tuple(_type)
        type_repr = "|".join(str(t) for t in _type)
    else:
        type_repr = f"'{_type}'"

    def validator(x) -> None:
        if not isinstance(x, _type):
            raise ValueError(f"Value must be an instance of {type_repr}")

    return validator
|
| 880 |
+
|
| 881 |
+
|
| 882 |
+
def is_one_of_factory(legal_values) -> Callable[[Any], None]:
    """
    Build a validator that accepts values from a fixed set.

    Callable entries in `legal_values` act as predicates: a value passes if
    it equals one of the listed literals or any predicate returns True.
    """
    predicates = [v for v in legal_values if callable(v)]
    literals = [v for v in legal_values if not callable(v)]

    def validator(x) -> None:
        if x in literals:
            return
        if any(pred(x) for pred in predicates):
            return
        pp_values = "|".join(str(v) for v in literals)
        msg = f"Value must be one of {pp_values}"
        if predicates:
            msg += " or a callable"
        raise ValueError(msg)

    return validator
|
| 897 |
+
|
| 898 |
+
|
| 899 |
+
def is_nonnegative_int(value: object) -> None:
    """
    Verify that value is None or a nonnegative int.

    (The docstring previously said "positive int", contradicting both the
    function name and the ``value >= 0`` check, which accepts 0.)

    Parameters
    ----------
    value : None or int
        The `value` to be checked.

    Raises
    ------
    ValueError
        When the value is neither None nor a nonnegative integer.
    """
    if value is None:
        return
    # Note: bool is a subclass of int, so True/False pass the isinstance
    # check here — this preserves the validator's existing behavior.
    if isinstance(value, int) and value >= 0:
        return
    raise ValueError("Value must be a nonnegative integer or None")
|
| 922 |
+
|
| 923 |
+
|
| 924 |
+
# common type validators, for convenience
# usage: register_option(... , validator = is_int)
# Note: the is_* type validators use exact type matching (is_type_factory),
# so e.g. a bool does not validate as an int; is_text accepts str or bytes
# via isinstance.
is_int = is_type_factory(int)
is_bool = is_type_factory(bool)
is_float = is_type_factory(float)
is_str = is_type_factory(str)
is_text = is_instance_factory((str, bytes))
|
| 931 |
+
|
| 932 |
+
|
| 933 |
+
def is_callable(obj) -> bool:
    """
    Validate that `obj` is callable.

    Parameters
    ----------
    `obj` - the object to be checked

    Returns
    -------
    True if the object is callable; raises ValueError otherwise.

    """
    if callable(obj):
        return True
    raise ValueError("Value must be a callable")
|
lib/python3.10/site-packages/pandas/_config/dates.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
config for datetime formatting
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
from pandas._config import config as cf
|
| 7 |
+
|
| 8 |
+
pc_date_dayfirst_doc = """
|
| 9 |
+
: boolean
|
| 10 |
+
When True, prints and parses dates with the day first, eg 20/01/2005
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
pc_date_yearfirst_doc = """
|
| 14 |
+
: boolean
|
| 15 |
+
When True, prints and parses dates with the year first, eg 2005/01/20
|
| 16 |
+
"""
|
| 17 |
+
|
| 18 |
+
with cf.config_prefix("display"):
|
| 19 |
+
# Needed upstream of `_libs` because these are used in tslibs.parsing
|
| 20 |
+
cf.register_option(
|
| 21 |
+
"date_dayfirst", False, pc_date_dayfirst_doc, validator=cf.is_bool
|
| 22 |
+
)
|
| 23 |
+
cf.register_option(
|
| 24 |
+
"date_yearfirst", False, pc_date_yearfirst_doc, validator=cf.is_bool
|
| 25 |
+
)
|