Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- parrot/lib/python3.10/site-packages/decord/__init__.py +19 -0
- parrot/lib/python3.10/site-packages/decord/__pycache__/logging.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/__pycache__/video_reader.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_api_internal.py +1 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/__init__.py +10 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/base.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/function.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/libinfo.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/ndarray.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/runtime_ctypes.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__init__.py +1 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/function.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/ndarray.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/types.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/function.py +233 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/ndarray.py +107 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/types.py +74 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_cy2/__init__.py +1 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_cy2/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_cy3/__init__.py +1 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/_cy3/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/base.py +127 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/function.py +318 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/libinfo.py +90 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/ndarray.py +344 -0
- parrot/lib/python3.10/site-packages/decord/_ffi/runtime_ctypes.py +258 -0
- parrot/lib/python3.10/site-packages/decord/audio_reader.py +182 -0
- parrot/lib/python3.10/site-packages/decord/av_reader.py +149 -0
- parrot/lib/python3.10/site-packages/decord/base.py +20 -0
- parrot/lib/python3.10/site-packages/decord/bridge/__init__.py +76 -0
- parrot/lib/python3.10/site-packages/decord/bridge/__pycache__/tf.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/bridge/__pycache__/torchdl.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/bridge/__pycache__/tvm.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/decord/bridge/mxnet.py +25 -0
- parrot/lib/python3.10/site-packages/decord/bridge/tf.py +30 -0
- parrot/lib/python3.10/site-packages/decord/bridge/torchdl.py +33 -0
- parrot/lib/python3.10/site-packages/decord/bridge/tvm.py +25 -0
- parrot/lib/python3.10/site-packages/decord/bridge/utils.py +21 -0
- parrot/lib/python3.10/site-packages/decord/data/base_action.py +346 -0
- parrot/lib/python3.10/site-packages/decord/logging.py +23 -0
- parrot/lib/python3.10/site-packages/decord/ndarray.py +90 -0
- parrot/lib/python3.10/site-packages/decord/video_loader.py +121 -0
- parrot/lib/python3.10/site-packages/decord/video_reader.py +251 -0
- parrot/lib/python3.10/site-packages/markdown_it/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/markdown_it/__pycache__/_compat.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/markdown_it/__pycache__/_punycode.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/markdown_it/__pycache__/main.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/markdown_it/__pycache__/parser_block.cpython-310.pyc +0 -0
parrot/lib/python3.10/site-packages/decord/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Decord python package"""
|
| 2 |
+
from . import function
|
| 3 |
+
|
| 4 |
+
from ._ffi.runtime_ctypes import TypeCode
|
| 5 |
+
from ._ffi.function import register_func, get_global_func, list_global_func_names, extract_ext_funcs
|
| 6 |
+
from ._ffi.base import DECORDError, DECORDLimitReachedError, __version__
|
| 7 |
+
|
| 8 |
+
from .base import ALL
|
| 9 |
+
|
| 10 |
+
from . import ndarray as nd
|
| 11 |
+
from .ndarray import cpu, gpu
|
| 12 |
+
from . import bridge
|
| 13 |
+
from . import logging
|
| 14 |
+
from .video_reader import VideoReader
|
| 15 |
+
from .video_loader import VideoLoader
|
| 16 |
+
from .audio_reader import AudioReader
|
| 17 |
+
from .av_reader import AVReader
|
| 18 |
+
|
| 19 |
+
logging.set_level(logging.ERROR)
|
parrot/lib/python3.10/site-packages/decord/__pycache__/logging.cpython-310.pyc
ADDED
|
Binary file (635 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/__pycache__/video_reader.cpython-310.pyc
ADDED
|
Binary file (8.58 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_api_internal.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Namespace for internal apis."""
|
parrot/lib/python3.10/site-packages/decord/_ffi/__init__.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""C interfacing code.
|
| 2 |
+
|
| 3 |
+
This namespace contains everything that interacts with C code.
|
| 4 |
+
Most C related object are ctypes compatible, which means
|
| 5 |
+
they contains a handle field that is ctypes.c_void_p and can
|
| 6 |
+
be used via ctypes function calls.
|
| 7 |
+
|
| 8 |
+
Some performance critical functions are implemented by cython
|
| 9 |
+
and have a ctypes fallback implementation.
|
| 10 |
+
"""
|
parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (521 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (3.39 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/function.cpython-310.pyc
ADDED
|
Binary file (9.03 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/libinfo.cpython-310.pyc
ADDED
|
Binary file (2.76 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/ndarray.cpython-310.pyc
ADDED
|
Binary file (10.4 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/__pycache__/runtime_ctypes.cpython-310.pyc
ADDED
|
Binary file (7.61 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""ctypes specific implementation of FFI"""
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (220 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/function.cpython-310.pyc
ADDED
|
Binary file (6.93 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/ndarray.cpython-310.pyc
ADDED
|
Binary file (3.74 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/__pycache__/types.cpython-310.pyc
ADDED
|
Binary file (2.72 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/function.py
ADDED
|
@@ -0,0 +1,233 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding: utf-8
|
| 2 |
+
# pylint: disable=invalid-name, protected-access, too-many-branches, global-statement
|
| 3 |
+
"""Function configuration API."""
|
| 4 |
+
from __future__ import absolute_import
|
| 5 |
+
|
| 6 |
+
import ctypes
|
| 7 |
+
import traceback
|
| 8 |
+
from numbers import Number, Integral
|
| 9 |
+
|
| 10 |
+
from ..base import _LIB, check_call
|
| 11 |
+
from ..base import c_str, string_types
|
| 12 |
+
from ..runtime_ctypes import DECORDType, DECORDByteArray, DECORDContext
|
| 13 |
+
from . import ndarray as _nd
|
| 14 |
+
from .ndarray import NDArrayBase, _make_array
|
| 15 |
+
from .types import DECORDValue, TypeCode
|
| 16 |
+
from .types import DECORDPackedCFunc, DECORDCFuncFinalizer
|
| 17 |
+
from .types import RETURN_SWITCH, C_TO_PY_ARG_SWITCH, _wrap_arg_func
|
| 18 |
+
|
| 19 |
+
FunctionHandle = ctypes.c_void_p
|
| 20 |
+
ModuleHandle = ctypes.c_void_p
|
| 21 |
+
DECORDRetValueHandle = ctypes.c_void_p
|
| 22 |
+
|
| 23 |
+
def _ctypes_free_resource(rhandle):
|
| 24 |
+
"""callback to free resources when it it not needed."""
|
| 25 |
+
pyobj = ctypes.cast(rhandle, ctypes.py_object)
|
| 26 |
+
ctypes.pythonapi.Py_DecRef(pyobj)
|
| 27 |
+
|
| 28 |
+
# Global callback that is always alive
|
| 29 |
+
DECORD_FREE_PYOBJ = DECORDCFuncFinalizer(_ctypes_free_resource)
|
| 30 |
+
ctypes.pythonapi.Py_IncRef(ctypes.py_object(DECORD_FREE_PYOBJ))
|
| 31 |
+
|
| 32 |
+
def convert_to_decord_func(pyfunc):
|
| 33 |
+
"""Convert a python function to DECORD function
|
| 34 |
+
|
| 35 |
+
Parameters
|
| 36 |
+
----------
|
| 37 |
+
pyfunc : python function
|
| 38 |
+
The python function to be converted.
|
| 39 |
+
|
| 40 |
+
Returns
|
| 41 |
+
-------
|
| 42 |
+
decordfunc: decord.nd.Function
|
| 43 |
+
The converted decord function.
|
| 44 |
+
"""
|
| 45 |
+
local_pyfunc = pyfunc
|
| 46 |
+
def cfun(args, type_codes, num_args, ret, _):
|
| 47 |
+
""" ctypes function """
|
| 48 |
+
num_args = num_args.value if isinstance(num_args, ctypes.c_int) else num_args
|
| 49 |
+
pyargs = (C_TO_PY_ARG_SWITCH[type_codes[i]](args[i]) for i in range(num_args))
|
| 50 |
+
# pylint: disable=broad-except
|
| 51 |
+
try:
|
| 52 |
+
rv = local_pyfunc(*pyargs)
|
| 53 |
+
except Exception:
|
| 54 |
+
msg = traceback.format_exc()
|
| 55 |
+
_LIB.DECORDAPISetLastError(c_str(msg))
|
| 56 |
+
return -1
|
| 57 |
+
|
| 58 |
+
if rv is not None:
|
| 59 |
+
if isinstance(rv, tuple):
|
| 60 |
+
raise ValueError("PackedFunction can only support one return value")
|
| 61 |
+
temp_args = []
|
| 62 |
+
values, tcodes, _ = _make_decord_args((rv,), temp_args)
|
| 63 |
+
if not isinstance(ret, DECORDRetValueHandle):
|
| 64 |
+
ret = DECORDRetValueHandle(ret)
|
| 65 |
+
check_call(_LIB.DECORDCFuncSetReturn(ret, values, tcodes, ctypes.c_int(1)))
|
| 66 |
+
_ = temp_args
|
| 67 |
+
_ = rv
|
| 68 |
+
return 0
|
| 69 |
+
|
| 70 |
+
handle = FunctionHandle()
|
| 71 |
+
f = DECORDPackedCFunc(cfun)
|
| 72 |
+
# NOTE: We will need to use python-api to increase ref count of the f
|
| 73 |
+
# DECORD_FREE_PYOBJ will be called after it is no longer needed.
|
| 74 |
+
pyobj = ctypes.py_object(f)
|
| 75 |
+
ctypes.pythonapi.Py_IncRef(pyobj)
|
| 76 |
+
check_call(_LIB.DECORDFuncCreateFromCFunc(
|
| 77 |
+
f, pyobj, DECORD_FREE_PYOBJ, ctypes.byref(handle)))
|
| 78 |
+
return _CLASS_FUNCTION(handle, False)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _make_decord_args(args, temp_args):
|
| 82 |
+
"""Pack arguments into c args decord call accept"""
|
| 83 |
+
num_args = len(args)
|
| 84 |
+
values = (DECORDValue * num_args)()
|
| 85 |
+
type_codes = (ctypes.c_int * num_args)()
|
| 86 |
+
for i, arg in enumerate(args):
|
| 87 |
+
if arg is None:
|
| 88 |
+
values[i].v_handle = None
|
| 89 |
+
type_codes[i] = TypeCode.NULL
|
| 90 |
+
elif isinstance(arg, NDArrayBase):
|
| 91 |
+
values[i].v_handle = ctypes.cast(arg.handle, ctypes.c_void_p)
|
| 92 |
+
type_codes[i] = (TypeCode.NDARRAY_CONTAINER
|
| 93 |
+
if not arg.is_view else TypeCode.ARRAY_HANDLE)
|
| 94 |
+
elif isinstance(arg, _nd._DECORD_COMPATS):
|
| 95 |
+
values[i].v_handle = ctypes.c_void_p(arg._decord_handle)
|
| 96 |
+
type_codes[i] = arg.__class__._decord_tcode
|
| 97 |
+
elif isinstance(arg, Integral):
|
| 98 |
+
values[i].v_int64 = arg
|
| 99 |
+
type_codes[i] = TypeCode.INT
|
| 100 |
+
elif isinstance(arg, Number):
|
| 101 |
+
values[i].v_float64 = arg
|
| 102 |
+
type_codes[i] = TypeCode.FLOAT
|
| 103 |
+
elif isinstance(arg, DECORDType):
|
| 104 |
+
values[i].v_str = c_str(str(arg))
|
| 105 |
+
type_codes[i] = TypeCode.STR
|
| 106 |
+
elif isinstance(arg, DECORDContext):
|
| 107 |
+
values[i].v_ctx = arg
|
| 108 |
+
type_codes[i] = TypeCode.DECORD_CONTEXT
|
| 109 |
+
elif isinstance(arg, bytearray):
|
| 110 |
+
arr = DECORDByteArray()
|
| 111 |
+
arr.data = ctypes.cast(
|
| 112 |
+
(ctypes.c_byte * len(arg)).from_buffer(arg),
|
| 113 |
+
ctypes.POINTER(ctypes.c_byte))
|
| 114 |
+
arr.size = len(arg)
|
| 115 |
+
values[i].v_handle = ctypes.c_void_p(ctypes.addressof(arr))
|
| 116 |
+
temp_args.append(arr)
|
| 117 |
+
type_codes[i] = TypeCode.BYTES
|
| 118 |
+
elif isinstance(arg, string_types):
|
| 119 |
+
values[i].v_str = c_str(arg)
|
| 120 |
+
type_codes[i] = TypeCode.STR
|
| 121 |
+
# NOTE(minjie): module is not used in DECORD
|
| 122 |
+
#elif isinstance(arg, _CLASS_MODULE):
|
| 123 |
+
# values[i].v_handle = arg.handle
|
| 124 |
+
# type_codes[i] = TypeCode.MODULE_HANDLE
|
| 125 |
+
elif isinstance(arg, FunctionBase):
|
| 126 |
+
values[i].v_handle = arg.handle
|
| 127 |
+
type_codes[i] = TypeCode.FUNC_HANDLE
|
| 128 |
+
elif isinstance(arg, ctypes.c_void_p):
|
| 129 |
+
values[i].v_handle = arg
|
| 130 |
+
type_codes[i] = TypeCode.HANDLE
|
| 131 |
+
elif callable(arg):
|
| 132 |
+
arg = convert_to_decord_func(arg)
|
| 133 |
+
values[i].v_handle = arg.handle
|
| 134 |
+
type_codes[i] = TypeCode.FUNC_HANDLE
|
| 135 |
+
temp_args.append(arg)
|
| 136 |
+
else:
|
| 137 |
+
raise TypeError("Don't know how to handle type %s" % type(arg))
|
| 138 |
+
return values, type_codes, num_args
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
class FunctionBase(object):
|
| 142 |
+
"""Function base."""
|
| 143 |
+
__slots__ = ["handle", "is_global"]
|
| 144 |
+
# pylint: disable=no-member
|
| 145 |
+
def __init__(self, handle, is_global):
|
| 146 |
+
"""Initialize the function with handle
|
| 147 |
+
|
| 148 |
+
Parameters
|
| 149 |
+
----------
|
| 150 |
+
handle : FunctionHandle
|
| 151 |
+
the handle to the underlying function.
|
| 152 |
+
|
| 153 |
+
is_global : bool
|
| 154 |
+
Whether this is a global function in python
|
| 155 |
+
"""
|
| 156 |
+
self.handle = handle
|
| 157 |
+
self.is_global = is_global
|
| 158 |
+
|
| 159 |
+
def __del__(self):
|
| 160 |
+
if not self.is_global and _LIB is not None:
|
| 161 |
+
check_call(_LIB.DECORDFuncFree(self.handle))
|
| 162 |
+
|
| 163 |
+
def __call__(self, *args):
|
| 164 |
+
"""Call the function with positional arguments
|
| 165 |
+
|
| 166 |
+
args : list
|
| 167 |
+
The positional arguments to the function call.
|
| 168 |
+
"""
|
| 169 |
+
temp_args = []
|
| 170 |
+
values, tcodes, num_args = _make_decord_args(args, temp_args)
|
| 171 |
+
ret_val = DECORDValue()
|
| 172 |
+
ret_tcode = ctypes.c_int()
|
| 173 |
+
check_call(_LIB.DECORDFuncCall(
|
| 174 |
+
self.handle, values, tcodes, ctypes.c_int(num_args),
|
| 175 |
+
ctypes.byref(ret_val), ctypes.byref(ret_tcode)))
|
| 176 |
+
_ = temp_args
|
| 177 |
+
_ = args
|
| 178 |
+
return RETURN_SWITCH[ret_tcode.value](ret_val)
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def __init_handle_by_constructor__(fconstructor, args):
|
| 182 |
+
"""Initialize handle by constructor"""
|
| 183 |
+
temp_args = []
|
| 184 |
+
values, tcodes, num_args = _make_decord_args(args, temp_args)
|
| 185 |
+
ret_val = DECORDValue()
|
| 186 |
+
ret_tcode = ctypes.c_int()
|
| 187 |
+
check_call(_LIB.DECORDFuncCall(
|
| 188 |
+
fconstructor.handle, values, tcodes, ctypes.c_int(num_args),
|
| 189 |
+
ctypes.byref(ret_val), ctypes.byref(ret_tcode)))
|
| 190 |
+
_ = temp_args
|
| 191 |
+
_ = args
|
| 192 |
+
assert ret_tcode.value == TypeCode.NODE_HANDLE
|
| 193 |
+
handle = ret_val.v_handle
|
| 194 |
+
return handle
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def _return_module(x):
|
| 198 |
+
"""Return function"""
|
| 199 |
+
handle = x.v_handle
|
| 200 |
+
if not isinstance(handle, ModuleHandle):
|
| 201 |
+
handle = ModuleHandle(handle)
|
| 202 |
+
return _CLASS_MODULE(handle)
|
| 203 |
+
|
| 204 |
+
def _handle_return_func(x):
|
| 205 |
+
"""Return function"""
|
| 206 |
+
handle = x.v_handle
|
| 207 |
+
if not isinstance(handle, FunctionHandle):
|
| 208 |
+
handle = FunctionHandle(handle)
|
| 209 |
+
return _CLASS_FUNCTION(handle, False)
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
# setup return handle for function type
|
| 213 |
+
RETURN_SWITCH[TypeCode.FUNC_HANDLE] = _handle_return_func
|
| 214 |
+
RETURN_SWITCH[TypeCode.MODULE_HANDLE] = _return_module
|
| 215 |
+
RETURN_SWITCH[TypeCode.NDARRAY_CONTAINER] = lambda x: _make_array(x.v_handle, False)
|
| 216 |
+
C_TO_PY_ARG_SWITCH[TypeCode.FUNC_HANDLE] = _wrap_arg_func(
|
| 217 |
+
_handle_return_func, TypeCode.FUNC_HANDLE)
|
| 218 |
+
C_TO_PY_ARG_SWITCH[TypeCode.MODULE_HANDLE] = _wrap_arg_func(
|
| 219 |
+
_return_module, TypeCode.MODULE_HANDLE)
|
| 220 |
+
C_TO_PY_ARG_SWITCH[TypeCode.ARRAY_HANDLE] = lambda x: _make_array(x.v_handle, True)
|
| 221 |
+
C_TO_PY_ARG_SWITCH[TypeCode.NDARRAY_CONTAINER] = lambda x: _make_array(x.v_handle, False)
|
| 222 |
+
|
| 223 |
+
_CLASS_MODULE = None
|
| 224 |
+
_CLASS_FUNCTION = None
|
| 225 |
+
|
| 226 |
+
def _set_class_module(module_class):
|
| 227 |
+
"""Initialize the module."""
|
| 228 |
+
global _CLASS_MODULE
|
| 229 |
+
_CLASS_MODULE = module_class
|
| 230 |
+
|
| 231 |
+
def _set_class_function(func_class):
|
| 232 |
+
global _CLASS_FUNCTION
|
| 233 |
+
_CLASS_FUNCTION = func_class
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/ndarray.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# pylint: disable=invalid-name
|
| 2 |
+
"""Runtime NDArray api"""
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
|
| 5 |
+
import ctypes
|
| 6 |
+
from ..base import _LIB, check_call, c_str
|
| 7 |
+
from ..runtime_ctypes import DECORDArrayHandle
|
| 8 |
+
from .types import RETURN_SWITCH, C_TO_PY_ARG_SWITCH, _wrap_arg_func, _return_handle
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
DECORDPyCapsuleDestructor = ctypes.CFUNCTYPE(None, ctypes.c_void_p)
|
| 12 |
+
_c_str_dltensor = c_str('dltensor')
|
| 13 |
+
_c_str_used_dltensor = c_str('used_dltensor')
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# used for PyCapsule manipulation
|
| 17 |
+
if hasattr(ctypes, 'pythonapi'):
|
| 18 |
+
ctypes.pythonapi.PyCapsule_GetName.restype = ctypes.c_char_p
|
| 19 |
+
ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p
|
| 20 |
+
ctypes.pythonapi.PyCapsule_New.restype = ctypes.py_object
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _from_dlpack(dltensor):
|
| 24 |
+
dltensor = ctypes.py_object(dltensor)
|
| 25 |
+
if ctypes.pythonapi.PyCapsule_IsValid(dltensor, _c_str_dltensor):
|
| 26 |
+
ptr = ctypes.pythonapi.PyCapsule_GetPointer(dltensor, _c_str_dltensor)
|
| 27 |
+
# XXX(minjie): The below cast should be unnecessary given the code to
|
| 28 |
+
# set restype of PyCapsule calls. But weirdly, this does not
|
| 29 |
+
# work out always.
|
| 30 |
+
ptr = ctypes.cast(ptr, ctypes.c_void_p)
|
| 31 |
+
handle = DECORDArrayHandle()
|
| 32 |
+
check_call(_LIB.DECORDArrayFromDLPack(ptr, ctypes.byref(handle)))
|
| 33 |
+
ctypes.pythonapi.PyCapsule_SetName(dltensor, _c_str_used_dltensor)
|
| 34 |
+
ctypes.pythonapi.PyCapsule_SetDestructor(dltensor, DECORDPyCapsuleDestructor(0))
|
| 35 |
+
return _make_array(handle, False)
|
| 36 |
+
raise ValueError("Expect a dltensor field, PyCapsule can only be consumed once")
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _dlpack_deleter(pycapsule):
|
| 40 |
+
pycapsule = ctypes.cast(pycapsule, ctypes.py_object)
|
| 41 |
+
if ctypes.pythonapi.PyCapsule_IsValid(pycapsule, _c_str_dltensor):
|
| 42 |
+
ptr = ctypes.pythonapi.PyCapsule_GetPointer(pycapsule, _c_str_dltensor)
|
| 43 |
+
# XXX(minjie): The below cast should be unnecessary given the code to
|
| 44 |
+
# set restype of PyCapsule calls. But weirdly, this does not
|
| 45 |
+
# work out always.
|
| 46 |
+
ptr = ctypes.cast(ptr, ctypes.c_void_p)
|
| 47 |
+
_LIB.DECORDDLManagedTensorCallDeleter(ptr)
|
| 48 |
+
ctypes.pythonapi.PyCapsule_SetDestructor(pycapsule, DECORDPyCapsuleDestructor(0))
|
| 49 |
+
|
| 50 |
+
_c_dlpack_deleter = DECORDPyCapsuleDestructor(_dlpack_deleter)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class NDArrayBase(object):
|
| 54 |
+
"""A simple Device/CPU Array object in runtime."""
|
| 55 |
+
__slots__ = ["handle", "is_view"]
|
| 56 |
+
# pylint: disable=no-member
|
| 57 |
+
def __init__(self, handle, is_view=False):
|
| 58 |
+
"""Initialize the function with handle
|
| 59 |
+
|
| 60 |
+
Parameters
|
| 61 |
+
----------
|
| 62 |
+
handle : DECORDArrayHandle
|
| 63 |
+
the handle to the underlying C++ DECORDArray
|
| 64 |
+
"""
|
| 65 |
+
self.handle = handle
|
| 66 |
+
self.is_view = is_view
|
| 67 |
+
|
| 68 |
+
def __del__(self):
|
| 69 |
+
if not self.is_view and _LIB:
|
| 70 |
+
check_call(_LIB.DECORDArrayFree(self.handle))
|
| 71 |
+
|
| 72 |
+
@property
|
| 73 |
+
def _decord_handle(self):
|
| 74 |
+
return ctypes.cast(self.handle, ctypes.c_void_p).value
|
| 75 |
+
|
| 76 |
+
def to_dlpack(self):
|
| 77 |
+
"""Produce an array from a DLPack Tensor without copying memory
|
| 78 |
+
|
| 79 |
+
Returns
|
| 80 |
+
-------
|
| 81 |
+
dlpack : DLPack tensor view of the array data
|
| 82 |
+
"""
|
| 83 |
+
ptr = ctypes.c_void_p()
|
| 84 |
+
check_call(_LIB.DECORDArrayToDLPack(self.handle, ctypes.byref(ptr)))
|
| 85 |
+
return ctypes.pythonapi.PyCapsule_New(ptr, _c_str_dltensor, _c_dlpack_deleter)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _make_array(handle, is_view):
|
| 89 |
+
handle = ctypes.cast(handle, DECORDArrayHandle)
|
| 90 |
+
return _CLASS_NDARRAY(handle, is_view)
|
| 91 |
+
|
| 92 |
+
_DECORD_COMPATS = ()
|
| 93 |
+
|
| 94 |
+
def _reg_extension(cls, fcreate):
|
| 95 |
+
global _DECORD_COMPATS
|
| 96 |
+
_DECORD_COMPATS += (cls,)
|
| 97 |
+
if fcreate:
|
| 98 |
+
fret = lambda x: fcreate(_return_handle(x))
|
| 99 |
+
RETURN_SWITCH[cls._decord_tcode] = fret
|
| 100 |
+
C_TO_PY_ARG_SWITCH[cls._decord_tcode] = _wrap_arg_func(fret, cls._decord_tcode)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
_CLASS_NDARRAY = None
|
| 104 |
+
|
| 105 |
+
def _set_class_ndarray(cls):
|
| 106 |
+
global _CLASS_NDARRAY
|
| 107 |
+
_CLASS_NDARRAY = cls
|
parrot/lib/python3.10/site-packages/decord/_ffi/_ctypes/types.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""The C Types used in API."""
|
| 2 |
+
# pylint: disable=invalid-name
|
| 3 |
+
from __future__ import absolute_import as _abs
|
| 4 |
+
|
| 5 |
+
import ctypes
|
| 6 |
+
from ..base import py_str, check_call, _LIB
|
| 7 |
+
from ..runtime_ctypes import DECORDByteArray, TypeCode
|
| 8 |
+
|
| 9 |
+
class DECORDValue(ctypes.Union):
|
| 10 |
+
"""DECORDValue in C API"""
|
| 11 |
+
_fields_ = [("v_int64", ctypes.c_int64),
|
| 12 |
+
("v_float64", ctypes.c_double),
|
| 13 |
+
("v_handle", ctypes.c_void_p),
|
| 14 |
+
("v_str", ctypes.c_char_p)]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
DECORDPackedCFunc = ctypes.CFUNCTYPE(
|
| 18 |
+
ctypes.c_int,
|
| 19 |
+
ctypes.POINTER(DECORDValue),
|
| 20 |
+
ctypes.POINTER(ctypes.c_int),
|
| 21 |
+
ctypes.c_int,
|
| 22 |
+
ctypes.c_void_p,
|
| 23 |
+
ctypes.c_void_p)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
DECORDCFuncFinalizer = ctypes.CFUNCTYPE(
|
| 27 |
+
None,
|
| 28 |
+
ctypes.c_void_p)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _return_handle(x):
|
| 32 |
+
"""return handle"""
|
| 33 |
+
handle = x.v_handle
|
| 34 |
+
if not isinstance(handle, ctypes.c_void_p):
|
| 35 |
+
handle = ctypes.c_void_p(handle)
|
| 36 |
+
return handle
|
| 37 |
+
|
| 38 |
+
def _return_bytes(x):
|
| 39 |
+
"""return handle"""
|
| 40 |
+
handle = x.v_handle
|
| 41 |
+
if not isinstance(handle, ctypes.c_void_p):
|
| 42 |
+
handle = ctypes.c_void_p(handle)
|
| 43 |
+
arr = ctypes.cast(handle, ctypes.POINTER(DECORDByteArray))[0]
|
| 44 |
+
size = arr.size
|
| 45 |
+
res = bytearray(size)
|
| 46 |
+
rptr = (ctypes.c_byte * size).from_buffer(res)
|
| 47 |
+
if not ctypes.memmove(rptr, arr.data, size):
|
| 48 |
+
raise RuntimeError('memmove failed')
|
| 49 |
+
return res
|
| 50 |
+
|
| 51 |
+
def _wrap_arg_func(return_f, type_code):
|
| 52 |
+
tcode = ctypes.c_int(type_code)
|
| 53 |
+
def _wrap_func(x):
|
| 54 |
+
check_call(_LIB.DECORDCbArgToReturn(ctypes.byref(x), tcode))
|
| 55 |
+
return return_f(x)
|
| 56 |
+
return _wrap_func
|
| 57 |
+
|
| 58 |
+
RETURN_SWITCH = {
|
| 59 |
+
TypeCode.INT: lambda x: x.v_int64,
|
| 60 |
+
TypeCode.FLOAT: lambda x: x.v_float64,
|
| 61 |
+
TypeCode.HANDLE: _return_handle,
|
| 62 |
+
TypeCode.NULL: lambda x: None,
|
| 63 |
+
TypeCode.STR: lambda x: py_str(x.v_str),
|
| 64 |
+
TypeCode.BYTES: _return_bytes
|
| 65 |
+
}
|
| 66 |
+
|
| 67 |
+
C_TO_PY_ARG_SWITCH = {
|
| 68 |
+
TypeCode.INT: lambda x: x.v_int64,
|
| 69 |
+
TypeCode.FLOAT: lambda x: x.v_float64,
|
| 70 |
+
TypeCode.HANDLE: _return_handle,
|
| 71 |
+
TypeCode.NULL: lambda x: None,
|
| 72 |
+
TypeCode.STR: lambda x: py_str(x.v_str),
|
| 73 |
+
TypeCode.BYTES: _return_bytes
|
| 74 |
+
}
|
parrot/lib/python3.10/site-packages/decord/_ffi/_cy2/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""cython2 namespace"""
|
parrot/lib/python3.10/site-packages/decord/_ffi/_cy2/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (197 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/_cy3/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""cython3 namespace"""
|
parrot/lib/python3.10/site-packages/decord/_ffi/_cy3/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (197 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/_ffi/base.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding: utf-8
|
| 2 |
+
# pylint: disable=invalid-name
|
| 3 |
+
"""ctypes library and helper functions """
|
| 4 |
+
from __future__ import absolute_import
|
| 5 |
+
|
| 6 |
+
import sys
|
| 7 |
+
import os
|
| 8 |
+
import ctypes
|
| 9 |
+
import numpy as np
|
| 10 |
+
from . import libinfo
|
| 11 |
+
|
| 12 |
+
#----------------------------
|
| 13 |
+
# library loading
|
| 14 |
+
#----------------------------
|
| 15 |
+
if sys.version_info[0] == 3:
|
| 16 |
+
string_types = (str,)
|
| 17 |
+
numeric_types = (float, int, np.float32, np.int32)
|
| 18 |
+
# this function is needed for python3
|
| 19 |
+
# to convert ctypes.char_p .value back to python str
|
| 20 |
+
py_str = lambda x: x.decode('utf-8')
|
| 21 |
+
else:
|
| 22 |
+
string_types = (basestring,)
|
| 23 |
+
numeric_types = (float, int, long, np.float32, np.int32)
|
| 24 |
+
py_str = lambda x: x
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class DECORDError(Exception):
|
| 28 |
+
"""Error thrown by DECORD function"""
|
| 29 |
+
pass # pylint: disable=unnecessary-pass
|
| 30 |
+
|
| 31 |
+
class DECORDLimitReachedError(Exception):
|
| 32 |
+
"""Limit Reached Error thrown by DECORD function"""
|
| 33 |
+
pass # pylint: disable=unnecessary-pass
|
| 34 |
+
|
| 35 |
+
def _load_lib():
|
| 36 |
+
"""Load libary by searching possible path."""
|
| 37 |
+
lib_path = libinfo.find_lib_path()
|
| 38 |
+
os.environ['PATH'] += os.pathsep + os.path.dirname(lib_path[0])
|
| 39 |
+
lib = ctypes.CDLL(lib_path[0], ctypes.RTLD_GLOBAL)
|
| 40 |
+
# DMatrix functions
|
| 41 |
+
lib.DECORDGetLastError.restype = ctypes.c_char_p
|
| 42 |
+
return lib, os.path.basename(lib_path[0])
|
| 43 |
+
|
| 44 |
+
# version number
|
| 45 |
+
__version__ = libinfo.__version__
|
| 46 |
+
# library instance of nnvm
|
| 47 |
+
_LIB, _LIB_NAME = _load_lib()
|
| 48 |
+
|
| 49 |
+
# The FFI mode of DECORD
|
| 50 |
+
_FFI_MODE = os.environ.get("DECORD_FFI", "auto")
|
| 51 |
+
|
| 52 |
+
# enable stack trace or not
|
| 53 |
+
_ENABLE_STACK_TRACE = int(os.environ.get("DECORD_ENABLE_STACK_TRACE", "0"))
|
| 54 |
+
|
| 55 |
+
#----------------------------
|
| 56 |
+
# helper function in ctypes.
|
| 57 |
+
#----------------------------
|
| 58 |
+
def check_call(ret):
|
| 59 |
+
"""Check the return value of C API call
|
| 60 |
+
|
| 61 |
+
This function will raise exception when error occurs.
|
| 62 |
+
Wrap every API call with this function
|
| 63 |
+
|
| 64 |
+
Parameters
|
| 65 |
+
----------
|
| 66 |
+
ret : int
|
| 67 |
+
return value from API calls
|
| 68 |
+
"""
|
| 69 |
+
if ret != 0:
|
| 70 |
+
err_str = py_str(_LIB.DECORDGetLastError())
|
| 71 |
+
if not _ENABLE_STACK_TRACE:
|
| 72 |
+
if 'Stack trace' in err_str:
|
| 73 |
+
err_str = err_str.split('Stack trace')[0].strip()
|
| 74 |
+
if 'recovered from nearest frames' in err_str:
|
| 75 |
+
if 'Stack trace' in err_str:
|
| 76 |
+
err_str = err_str.split('Stack trace')[0].strip()
|
| 77 |
+
raise DECORDLimitReachedError(err_str)
|
| 78 |
+
raise DECORDError(err_str)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def c_str(string):
    """Create a ctypes ``c_char_p`` from a Python string.

    Parameters
    ----------
    string : str
        The Python string to convert.

    Returns
    -------
    str : c_char_p
        A char pointer that can be passed to the C API.
    """
    encoded = string.encode('utf-8')
    return ctypes.c_char_p(encoded)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def c_array(ctype, values):
    """Create a ctypes array from a Python sequence.

    Parameters
    ----------
    ctype : ctypes data type
        Element type of the resulting array.

    values : tuple or list
        Data content.

    Returns
    -------
    out : ctypes array
        The populated ctypes array.
    """
    array_type = ctype * len(values)
    return array_type(*values)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def decorate(func, fwrapped):
    """Wrap *func* with *fwrapped* via the ``decorator`` package.

    The import is deferred so the dependency is only required when this
    helper is actually used.

    Parameters
    ----------
    func : function
        The original function.

    fwrapped : function
        The wrapped function.
    """
    from decorator import decorate as _decorate
    return _decorate(func, fwrapped)
|
parrot/lib/python3.10/site-packages/decord/_ffi/function.py
ADDED
|
@@ -0,0 +1,318 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# pylint: disable=invalid-name, unused-import
"""Function namespace."""
from __future__ import absolute_import

import sys
import ctypes
from .base import _LIB, check_call, py_str, c_str, string_types, _FFI_MODE

# When the user explicitly requested cython, a failed import is a hard error
# (RuntimeError); otherwise ImportError triggers the ctypes fallback below.
IMPORT_EXCEPT = RuntimeError if _FFI_MODE == "cython" else ImportError

try:
    # pylint: disable=wrong-import-position
    if _FFI_MODE == "ctypes":
        raise ImportError()
    if sys.version_info >= (3, 0):
        from ._cy3.core import _set_class_function, _set_class_module
        from ._cy3.core import FunctionBase as _FunctionBase
        from ._cy3.core import convert_to_decord_func
    else:
        from ._cy2.core import _set_class_function, _set_class_module
        from ._cy2.core import FunctionBase as _FunctionBase
        from ._cy2.core import convert_to_decord_func
except IMPORT_EXCEPT:
    # pylint: disable=wrong-import-position
    from ._ctypes.function import _set_class_function, _set_class_module
    from ._ctypes.function import FunctionBase as _FunctionBase
    from ._ctypes.function import convert_to_decord_func

# Opaque handle to a C-side PackedFunc object.
FunctionHandle = ctypes.c_void_p
|
| 30 |
+
|
| 31 |
+
class Function(_FunctionBase):
    """The PackedFunc object.

    ``Function`` bridges the Python frontend and the DECORD C++ backend.
    It provides a type-erased interface: call it with plain positional
    arguments.  Compiled modules return ``Function`` objects, and the
    backend registers and exposes its own API as ``Function`` objects as
    well (for example the developer functions in ``decord.ir_pass`` are
    C++ functions registered as PackedFunc).

    Common usage scenarios:

    - Automatic exposure of C++ API into Python
    - Calling a PackedFunc from the Python side
    - Calling Python callbacks to inspect results in generated code
    - Bringing a Python hook into the C++ backend

    See Also
    --------
    decord.register_func: How to register global function.
    decord.get_global_func: How to get global function.
    """
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class ModuleBase(object):
    """Base class for runtime modules loaded from the DECORD backend."""
    __slots__ = ["handle", "_entry", "entry_name"]

    def __init__(self, handle):
        self.handle = handle
        self._entry = None
        self.entry_name = "__decord_main__"

    def __del__(self):
        # Release the backend-side module object.
        check_call(_LIB.DECORDModFree(self.handle))

    @property
    def entry_func(self):
        """Function: the module's entry function, resolved lazily and cached."""
        entry = self._entry
        if not entry:
            entry = self.get_function(self.entry_name)
            self._entry = entry
        return entry

    def get_function(self, name, query_imports=False):
        """Look up a function exported by this module.

        Parameters
        ----------
        name : str
            The name of the function.

        query_imports : bool
            Whether also query modules imported by this module.

        Returns
        -------
        f : Function
            The result function.

        Raises
        ------
        AttributeError
            If the module exports no function with that name.
        """
        out_handle = FunctionHandle()
        check_call(_LIB.DECORDModGetFunction(
            self.handle, c_str(name),
            ctypes.c_int(query_imports),
            ctypes.byref(out_handle)))
        if not out_handle.value:
            raise AttributeError(
                "Module has no function '%s'" % name)
        return Function(out_handle, False)

    def import_module(self, module):
        """Add *module* to the import list of the current module.

        Parameters
        ----------
        module : Module
            The other module.
        """
        check_call(_LIB.DECORDModImport(self.handle, module.handle))

    def __getitem__(self, name):
        if not isinstance(name, string_types):
            raise ValueError("Can only take string as function name")
        return self.get_function(name)

    def __call__(self, *args):
        # Fast path: reuse the cached entry function when available.
        entry = self._entry if self._entry else self.entry_func
        return entry(*args)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def register_func(func_name, f=None, override=False):
    """Register a global function.

    Parameters
    ----------
    func_name : str or function
        The function name.  When a callable is given, its ``__name__``
        is used and it is registered directly (decorator usage).

    f : function, optional
        The function to be registered.

    override: boolean optional
        Whether override existing entry.

    Returns
    -------
    fregister : function
        Register function if f is not specified.

    Examples
    --------
    Register ``my_packed_func`` as a global function, then fetch it back
    from the global table to invoke it from Python (the same function can
    also be invoked from the C++ backend or compiled DECORD code):

    .. code-block:: python

      targs = (10, 10.0, "hello")
      @decord.register_func
      def my_packed_func(*args):
          assert(tuple(args) == targs)
          return 10
      f = decord.get_global_func("my_packed_func")
      assert isinstance(f, decord.nd.Function)
      assert f(*targs) == 10
    """
    if callable(func_name):
        f = func_name
        func_name = f.__name__

    if not isinstance(func_name, str):
        raise ValueError("expect string function name")

    ioverride = ctypes.c_int(override)

    def _do_register(myf):
        """Register one callable under *func_name* and return it."""
        if not isinstance(myf, Function):
            myf = convert_to_decord_func(myf)
        check_call(_LIB.DECORDFuncRegisterGlobal(
            c_str(func_name), myf.handle, ioverride))
        return myf

    return _do_register(f) if f else _do_register
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def get_global_func(name, allow_missing=False):
    """Get a global function by name.

    Parameters
    ----------
    name : str
        The name of the global function.

    allow_missing : bool
        Whether allow missing function or raise an error.

    Returns
    -------
    func : decord.Function
        The function to be returned, None if function is missing.

    Raises
    ------
    ValueError
        If the function is missing and ``allow_missing`` is False.
    """
    handle = FunctionHandle()
    check_call(_LIB.DECORDFuncGetGlobal(c_str(name), ctypes.byref(handle)))
    if handle.value:
        return Function(handle, False)
    if allow_missing:
        return None
    raise ValueError("Cannot find global function %s" % name)
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def list_global_func_names():
    """Get list of global functions registered.

    Returns
    -------
    names : list
        List of global functions names.
    """
    plist = ctypes.POINTER(ctypes.c_char_p)()
    size = ctypes.c_uint()
    check_call(_LIB.DECORDFuncListGlobalNames(ctypes.byref(size),
                                              ctypes.byref(plist)))
    # The backend owns the string table; copy the names into Python strings.
    return [py_str(plist[i]) for i in range(size.value)]
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def extract_ext_funcs(finit):
    """Extract the extension PackedFuncs from a C module.

    Parameters
    ----------
    finit : ctypes function
        a ctypes that takes signature of DECORDExtensionDeclarer

    Returns
    -------
    fdict : dict of str to Function
        The extracted functions
    """
    fdict = {}

    def _collect(name, func):
        """Callback invoked by the C side once per declared function."""
        fdict[name] = func

    callback = convert_to_decord_func(_collect)
    ret = finit(callback.handle)
    _ = callback  # keep the callback alive until finit has returned
    if ret != 0:
        raise RuntimeError("cannot initialize with %s" % finit)
    return fdict
|
| 260 |
+
|
| 261 |
+
def _get_api(f):
|
| 262 |
+
flocal = f
|
| 263 |
+
flocal.is_global = True
|
| 264 |
+
return flocal
|
| 265 |
+
|
| 266 |
+
def _init_api(namespace, target_module_name=None):
    """Initialize api for a given module name.

    namespace : str
        The namespace of the source registry

    target_module_name : str
        The target module name if different from namespace
    """
    target = target_module_name or namespace
    # Strip the leading "decord." so registered names match the prefix.
    prefix = namespace[7:] if namespace.startswith("decord.") else namespace
    _init_api_prefix(target, prefix)
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
def _init_api_prefix(module_name, prefix):
    """Attach every global function named ``<prefix>.<fname>`` to *module_name*."""
    module = sys.modules[module_name]

    for name in list_global_func_names():
        # Skip internal (underscore) and foreign-prefix functions.
        if name.startswith("_") or not name.startswith(prefix):
            continue
        fname = name[len(prefix) + 1:]
        if "." in fname:
            print('Warning: invalid API name "%s".' % fname)
            continue
        wrapped = _get_api(get_global_func(name))
        wrapped.__name__ = fname
        wrapped.__doc__ = ("DECORD PackedFunc %s. " % fname)
        setattr(module, wrapped.__name__, wrapped)
|
| 302 |
+
|
| 303 |
+
def _init_internal_api():
    """Attach every underscore-prefixed global function to ``decord._api_internal``."""
    for name in list_global_func_names():
        if not name.startswith("_"):
            continue
        if "." in name:
            print('Warning: invalid API name "%s".' % name)
            continue
        target_module = sys.modules["decord._api_internal"]
        wrapped = _get_api(get_global_func(name))
        wrapped.__name__ = name
        wrapped.__doc__ = ("DECORD PackedFunc %s. " % name)
        setattr(target_module, wrapped.__name__, wrapped)
|
| 317 |
+
|
| 318 |
+
# Tell the FFI layer which Python class wraps a PackedFunc handle.
_set_class_function(Function)
|
parrot/lib/python3.10/site-packages/decord/_ffi/libinfo.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Library information."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
import sys
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def find_lib_path(name=None, search_path=None, optional=False):
    """Find dynamic library files.

    Parameters
    ----------
    name : str or list of str, optional
        Library file name(s) to look for.  When omitted, the platform's
        default decord library name(s) are used.

    search_path : str or list of str, optional
        Additional directory (or directories) to search beyond the
        default candidates.

    optional : bool
        When True, return None instead of raising if nothing is found.

    Returns
    -------
    lib_path : list(string) or None
        List of all found paths to the libraries, or None when
        ``optional`` is True and nothing was found.

    Raises
    ------
    RuntimeError
        If no library is found and ``optional`` is False.
    """
    # See https://github.com/dmlc/tvm/issues/281 for some background.

    # NB: This will either be the source directory (if DECORD is run
    # inplace) or the install directory (if DECORD is installed).
    # An installed DECORD's curr_path will look something like:
    #   $PREFIX/lib/python3.6/site-packages/decord/_ffi
    ffi_dir = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    source_dir = os.path.join(ffi_dir, "..", "..", "..")
    install_lib_dir = os.path.join(ffi_dir, "..", "..", "..", "..")

    dll_path = []

    if os.environ.get('DECORD_LIBRARY_PATH', None):
        dll_path.append(os.environ['DECORD_LIBRARY_PATH'])

    if sys.platform.startswith('linux') and os.environ.get('LD_LIBRARY_PATH', None):
        dll_path.extend([p.strip() for p in os.environ['LD_LIBRARY_PATH'].split(":")])
    elif sys.platform.startswith('darwin') and os.environ.get('DYLD_LIBRARY_PATH', None):
        dll_path.extend([p.strip() for p in os.environ['DYLD_LIBRARY_PATH'].split(":")])

    # Pip lib directory
    dll_path.append(os.path.join(ffi_dir, ".."))
    # Default cmake build directory
    dll_path.append(os.path.join(source_dir, "build"))
    dll_path.append(os.path.join(source_dir, "build", "Release"))
    # Default make build directory
    dll_path.append(os.path.join(source_dir, "lib"))

    dll_path.append(install_lib_dir)

    dll_path = [os.path.abspath(x) for x in dll_path]
    if search_path is not None:
        # BUG FIX: the original tested ``search_path is list`` -- an identity
        # comparison against the builtin type that is always False -- so a
        # list argument was appended whole and later broke os.path.join.
        if isinstance(search_path, list):
            dll_path = dll_path + search_path
        else:
            dll_path.append(search_path)
    if name is not None:
        if isinstance(name, list):
            lib_dll_path = []
            for n in name:
                lib_dll_path += [os.path.join(p, n) for p in dll_path]
        else:
            lib_dll_path = [os.path.join(p, name) for p in dll_path]
    else:
        if sys.platform.startswith('win32'):
            lib_dll_path = [os.path.join(p, 'libdecord.dll') for p in dll_path] +\
                           [os.path.join(p, 'decord.dll') for p in dll_path]
        elif sys.platform.startswith('darwin'):
            lib_dll_path = [os.path.join(p, 'libdecord.dylib') for p in dll_path]
        else:
            lib_dll_path = [os.path.join(p, 'libdecord.so') for p in dll_path]

    # Keep only candidates that exist and are regular files.
    lib_found = [p for p in lib_dll_path if os.path.exists(p) and os.path.isfile(p)]

    if not lib_found:
        message = ('Cannot find the files.\n' +
                   'List of candidates:\n' +
                   str('\n'.join(lib_dll_path)))
        if not optional:
            raise RuntimeError(message)
        return None

    return lib_found
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
# current version
# We use the version of the incoming release for code
# that is under development.
# The following line is set by decord/python/update_version.py
__version__ = "0.6.0"
|
parrot/lib/python3.10/site-packages/decord/_ffi/ndarray.py
ADDED
|
@@ -0,0 +1,344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# pylint: disable=invalid-name, unused-import
|
| 2 |
+
"""Runtime NDArray api"""
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
import ctypes
|
| 7 |
+
import numpy as np
|
| 8 |
+
from .base import _LIB, check_call, c_array, string_types, _FFI_MODE, c_str
|
| 9 |
+
from .runtime_ctypes import DECORDType, DECORDContext, DECORDArray, DECORDArrayHandle
|
| 10 |
+
from .runtime_ctypes import TypeCode, decord_shape_index_t
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
IMPORT_EXCEPT = RuntimeError if _FFI_MODE == "cython" else ImportError
|
| 14 |
+
|
| 15 |
+
try:
|
| 16 |
+
# pylint: disable=wrong-import-position
|
| 17 |
+
if _FFI_MODE == "ctypes":
|
| 18 |
+
raise ImportError()
|
| 19 |
+
if sys.version_info >= (3, 0):
|
| 20 |
+
from ._cy3.core import _set_class_ndarray, _reg_extension, _make_array, _from_dlpack
|
| 21 |
+
from ._cy3.core import NDArrayBase as _NDArrayBase
|
| 22 |
+
else:
|
| 23 |
+
from ._cy2.core import _set_class_ndarray, _reg_extension, _make_array, _from_dlpack
|
| 24 |
+
from ._cy2.core import NDArrayBase as _NDArrayBase
|
| 25 |
+
except IMPORT_EXCEPT:
|
| 26 |
+
# pylint: disable=wrong-import-position
|
| 27 |
+
from ._ctypes.ndarray import _set_class_ndarray, _reg_extension, _make_array, _from_dlpack
|
| 28 |
+
from ._ctypes.ndarray import NDArrayBase as _NDArrayBase
|
| 29 |
+
|
| 30 |
+
def context(dev_type, dev_id=0):
    """Construct a DECORD context with given device type and id.

    Parameters
    ----------
    dev_type: int or str
        The device type mask or name of the device.

    dev_id : int, optional
        The integer device id

    Returns
    -------
    ctx: DECORDContext
        The corresponding context.

    Examples
    --------
    A context can be created from the string name of the device type:

    .. code-block:: python

      assert decord.context("cpu", 1) == decord.cpu(1)
      assert decord.context("gpu", 0) == decord.gpu(0)
      assert decord.context("cuda", 0) == decord.gpu(0)
    """
    if isinstance(dev_type, string_types):
        # Only the first word matters, e.g. "gpu 0" -> "gpu".
        key = dev_type.split()[0]
        if key not in DECORDContext.STR2MASK:
            raise ValueError("Unknown device type %s" % key)
        dev_type = DECORDContext.STR2MASK[key]
    return DECORDContext(dev_type, dev_id)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def numpyasarray(np_data):
    """Return a DECORDArray view of a C-contiguous numpy array (no copy).

    The caller must keep the returned ctypes shape array alive for as long
    as the DECORDArray view is used.
    """
    assert np_data.flags['C_CONTIGUOUS']
    shape = c_array(decord_shape_index_t, np_data.shape)
    arr = DECORDArray()
    arr.data = np_data.ctypes.data_as(ctypes.c_void_p)
    arr.shape = shape
    arr.strides = None
    arr.dtype = DECORDType(np.dtype(np_data.dtype).name)
    arr.ndim = np_data.ndim
    # CPU device
    arr.ctx = context(1, 0)
    return arr, shape
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def empty(shape, dtype="float32", ctx=context(1, 0)):
    """Create an empty array given shape and device.

    Parameters
    ----------
    shape : tuple of int
        The shape of the array

    dtype : type or str
        The data type of the array.

    ctx : DECORDContext
        The context of the array

    Returns
    -------
    arr : decord.nd.NDArray
        The array decord supported.
    """
    c_shape = c_array(decord_shape_index_t, shape)
    ndim = ctypes.c_int(len(c_shape))
    handle = DECORDArrayHandle()
    t = DECORDType(dtype)
    check_call(_LIB.DECORDArrayAlloc(
        c_shape, ndim,
        ctypes.c_int(t.type_code),
        ctypes.c_int(t.bits),
        ctypes.c_int(t.lanes),
        ctx.device_type,
        ctx.device_id,
        ctypes.byref(handle)))
    return _make_array(handle, False)
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def from_dlpack(dltensor):
    """Produce an array from a DLPack tensor without memory copy.

    The underlying DLPack tensor's data pointer is adopted and its
    destructor removed; the returned array is then responsible for
    destruction.  A DLManagedTensor can therefore only be consumed once.

    Parameters
    ----------
    dltensor : DLPack tensor
        Input DLManagedTensor, can only be consumed once.

    Returns
    -------
    arr: decord.nd.NDArray
        The array view of the tensor data.
    """
    return _from_dlpack(dltensor)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
class NDArrayBase(_NDArrayBase):
    """A simple Device/CPU Array object in runtime."""

    @property
    def shape(self):
        """Shape of this array as a tuple of ints."""
        return tuple(self.handle.contents.shape[i] for i in range(self.handle.contents.ndim))

    @property
    def dtype(self):
        """Data type of this array as a string."""
        return str(self.handle.contents.dtype)

    @property
    def ctx(self):
        """Context (device) of this array."""
        return self.handle.contents.ctx

    @property
    def context(self):
        """Context of this array; alias of :attr:`ctx`."""
        return self.ctx

    def __hash__(self):
        # Identity hash: address of the underlying C handle.
        return ctypes.cast(self.handle, ctypes.c_void_p).value

    def __eq__(self, other):
        return self.same_as(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def same_as(self, other):
        """Check object identity equality (same underlying handle).

        Parameters
        ----------
        other : object
            The other object to compare to

        Returns
        -------
        same : bool
            Whether other is same as self.
        """
        if not isinstance(other, NDArrayBase):
            return False
        return self.__hash__() == other.__hash__()

    def _numpy_view_spec(self):
        """Return ``(shape, dtype)`` describing this array as a numpy array.

        Vector types (``lanes > 1``) are expanded into one extra trailing
        axis.  Shared by :meth:`copyfrom` and :meth:`asnumpy`, which
        previously duplicated this logic.
        """
        t = DECORDType(self.dtype)
        shape, dtype = self.shape, self.dtype
        if t.lanes > 1:
            shape = shape + (t.lanes,)
            t.lanes = 1
            dtype = str(t)
        return shape, dtype

    def __setitem__(self, in_slice, value):
        """Set the whole array from *value*; only the full slice ``[:]`` is supported."""
        if (not isinstance(in_slice, slice) or
                in_slice.start is not None
                or in_slice.stop is not None):
            raise ValueError('Array only support set from numpy array')
        if isinstance(value, NDArrayBase):
            if value.handle is not self.handle:
                value.copyto(self)
        elif isinstance(value, (np.ndarray, np.generic)):
            self.copyfrom(value)
        else:
            raise TypeError('type %s not supported' % str(type(value)))

    def copyfrom(self, source_array):
        """Perform a synchronized copy from the array.

        Parameters
        ----------
        source_array : array_like
            The data source we should like to copy from.

        Returns
        -------
        arr : NDArray
            Reference to self.
        """
        if isinstance(source_array, NDArrayBase):
            source_array.copyto(self)
            return self

        if not isinstance(source_array, np.ndarray):
            try:
                source_array = np.array(source_array, dtype=self.dtype)
            except Exception as err:
                # BUG FIX: was a bare ``except:`` which also swallowed
                # KeyboardInterrupt/SystemExit and hid the original cause.
                raise TypeError('array must be an array_like data,' +
                                'type %s is not supported' % str(type(source_array))) from err
        shape, dtype = self._numpy_view_spec()

        if source_array.shape != shape:
            raise ValueError("array shape do not match the shape of NDArray {0} vs {1}".format(
                source_array.shape, shape))
        source_array = np.ascontiguousarray(source_array, dtype=dtype)
        assert source_array.flags['C_CONTIGUOUS']
        data = source_array.ctypes.data_as(ctypes.c_void_p)
        nbytes = ctypes.c_size_t(source_array.size * source_array.dtype.itemsize)
        check_call(_LIB.DECORDArrayCopyFromBytes(self.handle, data, nbytes))
        return self

    def __repr__(self):
        res = "<decord.NDArray shape={0}, {1}>\n".format(self.shape, self.context)
        res += self.asnumpy().__repr__()
        return res

    def __str__(self):
        return str(self.asnumpy())

    def asnumpy(self):
        """Convert this array to numpy array

        Returns
        -------
        np_arr : numpy.ndarray
            The corresponding numpy array.
        """
        shape, dtype = self._numpy_view_spec()
        np_arr = np.empty(shape, dtype=dtype)
        assert np_arr.flags['C_CONTIGUOUS']
        data = np_arr.ctypes.data_as(ctypes.c_void_p)
        nbytes = ctypes.c_size_t(np_arr.size * np_arr.dtype.itemsize)
        check_call(_LIB.DECORDArrayCopyToBytes(self.handle, data, nbytes))
        return np_arr

    def copyto(self, target):
        """Copy array to target

        Parameters
        ----------
        target : NDArray or DECORDContext
            The target array (must have the same shape as this array) or a
            context to allocate a fresh destination array on.
        """
        if isinstance(target, DECORDContext):
            target = empty(self.shape, self.dtype, target)
        if isinstance(target, NDArrayBase):
            check_call(_LIB.DECORDArrayCopyFromTo(
                self.handle, target.handle, None))
        else:
            raise ValueError("Unsupported target type %s" % str(type(target)))
        return target
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
def free_extension_handle(handle, type_code):
    """Free a C++ extension type handle.

    Parameters
    ----------
    handle : ctypes.c_void_p
        The handle to the extension type.

    type_code : int
        The type code of the extension type.
    """
    code = ctypes.c_int(type_code)
    check_call(_LIB.DECORDExtTypeFree(handle, code))
|
| 296 |
+
|
| 297 |
+
def register_extension(cls, fcreate=None):
    """Register an extension class to DECORD.

    After the class is registered, the class will be able
    to directly pass as Function argument generated by DECORD.

    Parameters
    ----------
    cls : class
        The class object to be registered as extension.

    fcreate : function, optional
        The creation function to create a class object given handle value.

    Note
    ----
    The registered class requires one property: _decord_handle and a class attribute _decord_tcode.

    - ```_decord_handle``` returns integer represents the address of the handle.
    - ```_decord_tcode``` gives integer represents type code of the class.

    Returns
    -------
    cls : class
        The class being registered.

    Example
    -------
    The following code registers user defined class
    MyTensor to be DLTensor compatible.

    .. code-block:: python

       @decord.register_extension
       class MyTensor(object):
           _decord_tcode = decord.TypeCode.ARRAY_HANDLE

           def __init__(self):
               self.handle = _LIB.NewDLTensor()

           @property
           def _decord_handle(self):
               return self.handle.value
    """
    # Custom creators are only valid for codes in the extension range;
    # builtin codes (< EXT_BEGIN) have fixed construction semantics.
    if fcreate and cls._decord_tcode < TypeCode.EXT_BEGIN:
        raise ValueError("Cannot register create when extension tcode is same as builtin")
    _reg_extension(cls, fcreate)
    return cls
|
parrot/lib/python3.10/site-packages/decord/_ffi/runtime_ctypes.py
ADDED
|
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Common runtime ctypes."""
|
| 2 |
+
# pylint: disable=invalid-name, super-init-not-called
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
|
| 5 |
+
import ctypes
|
| 6 |
+
import json
|
| 7 |
+
import numpy as np
|
| 8 |
+
from .base import _LIB, check_call
|
| 9 |
+
from .. import _api_internal
|
| 10 |
+
|
| 11 |
+
decord_shape_index_t = ctypes.c_int64
|
| 12 |
+
|
| 13 |
+
class TypeCode(object):
    """Type code used in API calls"""
    # Primitive value codes exchanged across the C FFI boundary.
    INT = 0
    UINT = 1
    FLOAT = 2
    HANDLE = 3
    NULL = 4
    # Library-specific composite codes.
    DECORD_TYPE = 5
    DECORD_CONTEXT = 6
    ARRAY_HANDLE = 7
    NODE_HANDLE = 8
    MODULE_HANDLE = 9
    FUNC_HANDLE = 10
    STR = 11
    BYTES = 12
    NDARRAY_CONTAINER = 13
    # Codes at or above EXT_BEGIN are reserved for user-registered
    # extension types (see register_extension).
    EXT_BEGIN = 15
|
| 30 |
+
|
| 31 |
+
class DECORDByteArray(ctypes.Structure):
    """Temp data structure for byte array."""
    # Mirrors the C struct: raw byte pointer plus its length in bytes.
    # Field order must match the C definition exactly.
    _fields_ = [("data", ctypes.POINTER(ctypes.c_byte)),
                ("size", ctypes.c_size_t)]
|
| 35 |
+
|
| 36 |
+
class DECORDType(ctypes.Structure):
    """DECORD datatype structure.

    Parses dtype strings such as ``'float32'``, ``'uint8'``, ``'int8x4'``
    (the ``xN`` suffix denotes N vector lanes) or ``'handle'``. Instances
    are interned per input string via ``_cache``.
    """
    _fields_ = [("type_code", ctypes.c_uint8),
                ("bits", ctypes.c_uint8),
                ("lanes", ctypes.c_uint16)]
    # Reverse map used by __repr__; codes match the C side.
    CODE2STR = {
        0 : 'int',
        1 : 'uint',
        2 : 'float',
        4 : 'handle'
    }
    _cache = {}

    def __new__(cls, type_str):
        # Interning: one instance per dtype string.
        if type_str in cls._cache:
            return cls._cache[type_str]

        inst = super(DECORDType, cls).__new__(DECORDType)

        if isinstance(type_str, np.dtype):
            type_str = str(type_str)
        arr = type_str.split("x")
        head = arr[0]
        inst.lanes = int(arr[1]) if len(arr) > 1 else 1
        bits = 32  # default width when the string carries no bit count

        if head.startswith("int"):
            inst.type_code = 0
            head = head[3:]
        elif head.startswith("uint"):
            inst.type_code = 1
            head = head[4:]
        elif head.startswith("float"):
            inst.type_code = 2
            head = head[5:]
        elif head.startswith("handle"):
            inst.type_code = 4
            bits = 64
            head = ""
        else:
            raise ValueError("Do not know how to handle type %s" % type_str)
        bits = int(head) if head else bits
        inst.bits = bits

        cls._cache[type_str] = inst
        return inst

    def __init__(self, type_str):
        # All initialization happens in __new__ (instances are cached).
        pass

    def __repr__(self):
        x = "%s%d" % (DECORDType.CODE2STR[self.type_code], self.bits)
        if self.lanes != 1:
            x += "x%d" % self.lanes
        return x

    def __eq__(self, other):
        # BUG FIX: guard against non-DECORDType operands -- the original
        # raised AttributeError when compared with e.g. a string or int.
        if not isinstance(other, DECORDType):
            return False
        return (self.bits == other.bits and
                self.type_code == other.type_code and
                self.lanes == other.lanes)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Defining __eq__ alone would make instances unhashable; hash on
        # the same fields __eq__ compares.
        return hash((self.type_code, self.bits, self.lanes))
|
| 99 |
+
|
| 100 |
+
# Device types at or above this mask denote devices behind an RPC session;
# see DECORDContext.__repr__ for how the mask is decoded.
RPC_SESS_MASK = 128
|
| 101 |
+
|
| 102 |
+
class DECORDContext(ctypes.Structure):
    """DECORD context structure.

    A (device_type, device_id) pair identifying where data lives and where
    work runs. Instances are interned per pair via ``_cache``.
    """
    _fields_ = [("device_type", ctypes.c_int),
                ("device_id", ctypes.c_int)]
    # Numeric device type -> canonical name (used by __repr__).
    MASK2STR = {
        1 : 'cpu',
        2 : 'gpu',
        4 : 'opencl',
        5 : 'aocl',
        6 : 'sdaccel',
        7 : 'vulkan',
        8 : 'metal',
        9 : 'vpi',
        10: 'rocm',
        11: 'opengl',
        12: 'ext_dev',
    }
    # Accepted aliases -> numeric device type.
    STR2MASK = {
        'llvm': 1,
        'stackvm': 1,
        'cpu': 1,
        'gpu': 2,
        'cuda': 2,
        'nvptx': 2,
        'cl': 4,
        'opencl': 4,
        'aocl' : 5,
        'aocl_sw_emu' : 5,
        'sdaccel': 6,
        'vulkan': 7,
        'metal': 8,
        'vpi': 9,
        'rocm': 10,
        'opengl': 11,
        'ext_dev': 12,
    }
    _cache = {}

    def __new__(cls, device_type, device_id):
        # Interning: one instance per (device_type, device_id) pair.
        if (device_type, device_id) in cls._cache:
            return cls._cache[(device_type, device_id)]

        inst = super(DECORDContext, cls).__new__(DECORDContext)

        inst.device_type = device_type
        inst.device_id = device_id

        cls._cache[(device_type, device_id)] = inst
        return inst

    def __init__(self, device_type, device_id):
        # All initialization happens in __new__ (instances are cached).
        pass

    @property
    def exist(self):
        """Whether this device exist."""
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 0) != 0

    @property
    def max_threads_per_block(self):
        """Maximum number of threads on each block."""
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 1)

    @property
    def warp_size(self):
        """Number of threads that executes in concurrent."""
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 2)

    @property
    def max_shared_memory_per_block(self):
        """Total amount of shared memory per block in bytes."""
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 3)

    @property
    def compute_version(self):
        """Get compute verison number in string.

        Currently used to get compute capability of CUDA device.

        Returns
        -------
        version : str
            The version string in `major.minor` format.
        """
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 4)

    @property
    def device_name(self):
        """Return the string name of device."""
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 5)

    @property
    def max_clock_rate(self):
        """Return the max clock frequency of device."""
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 6)

    @property
    def multi_processor_count(self):
        """Return the number of compute units of device."""
        return _api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 7)

    @property
    def max_thread_dimensions(self):
        """Return the maximum size of each thread axis

        Returns
        -------
        dims: List of int
            The maximum length of threadIdx.x, threadIdx.y, threadIdx.z
        """
        return json.loads(_api_internal._GetDeviceAttr(
            self.device_type, self.device_id, 8))

    def sync(self):
        """Synchronize until jobs finished at the context."""
        check_call(_LIB.DECORDSynchronize(self.device_type, self.device_id, None))

    def __eq__(self, other):
        return (isinstance(other, DECORDContext) and
                self.device_id == other.device_id and
                self.device_type == other.device_type)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        if self.device_type >= RPC_SESS_MASK:
            # Remote device behind an RPC session: decode the session table
            # id and the real device type from the masked value.
            # BUG FIX: use floor division -- `/` yields a float in Python 3.
            tbl_id = self.device_type // RPC_SESS_MASK - 1
            dev_type = self.device_type % RPC_SESS_MASK
            return "remote[%d]:%s(%d)" % (
                tbl_id, DECORDContext.MASK2STR[dev_type], self.device_id)
        return "%s(%d)" % (
            DECORDContext.MASK2STR[self.device_type], self.device_id)

    def __hash__(self):
        return hash((self.device_type, self.device_id))
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
class DECORDArray(ctypes.Structure):
    """DECORDValue in C API"""
    # NOTE(review): field layout appears to mirror a DLTensor-style C struct
    # (data pointer, context, ndim, dtype, shape/strides pointers, byte
    # offset); the order must match the C definition exactly -- confirm
    # against the C headers before editing.
    _fields_ = [("data", ctypes.c_void_p),
                ("ctx", DECORDContext),
                ("ndim", ctypes.c_int),
                ("dtype", DECORDType),
                ("shape", ctypes.POINTER(decord_shape_index_t)),
                ("strides", ctypes.POINTER(decord_shape_index_t)),
                ("byte_offset", ctypes.c_uint64)]

# Opaque pointer type used when passing arrays through the C API.
DECORDArrayHandle = ctypes.POINTER(DECORDArray)
|
parrot/lib/python3.10/site-packages/decord/audio_reader.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Audio Reader."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
import ctypes
|
| 5 |
+
import numpy as np
|
| 6 |
+
import math
|
| 7 |
+
|
| 8 |
+
from ._ffi.base import c_array, c_str
|
| 9 |
+
from ._ffi.function import _init_api
|
| 10 |
+
from ._ffi.ndarray import DECORDContext
|
| 11 |
+
from .base import DECORDError
|
| 12 |
+
from . import ndarray as _nd
|
| 13 |
+
from .ndarray import cpu, gpu
|
| 14 |
+
from .bridge import bridge_out
|
| 15 |
+
|
| 16 |
+
AudioReaderHandle = ctypes.c_void_p
|
| 17 |
+
|
| 18 |
+
class AudioReader(object):
    """Individual audio reader with convenient indexing function.

    Parameters
    ----------
    uri: str
        Path of file.
    ctx: decord.Context
        The context to decode the file, can be decord.cpu() or decord.gpu().
    sample_rate: int, default is -1
        Desired output sample rate of the audio, unchanged if `-1` is specified.
    mono: bool, default is True
        Desired output channel layout of the audio.
        Setting `True` will return the audio as mono layout.
        Setting `False` will return the audio channel layout intact.

    """

    def __init__(self, uri, ctx=cpu(0), sample_rate=-1, mono=True):
        self._handle = None
        assert isinstance(ctx, DECORDContext)
        is_mono = 1 if mono else 0
        if hasattr(uri, 'read'):
            # File-like object: hand the raw bytes to the C reader.
            ba = bytearray(uri.read())
            uri = '{} bytes'.format(len(ba))
            self._handle = _CAPI_AudioReaderGetAudioReader(
                ba, ctx.device_type, ctx.device_id, sample_rate, 2, is_mono)
        else:
            self._handle = _CAPI_AudioReaderGetAudioReader(
                uri, ctx.device_type, ctx.device_id, sample_rate, 0, is_mono)
        if self._handle is None:
            raise RuntimeError("Error reading " + uri + "...")
        # Decode everything eagerly and keep the samples as a numpy array
        # of shape (channels, samples).
        self._array = _CAPI_AudioReaderGetNDArray(self._handle)
        self._array = self._array.asnumpy()
        self._duration = _CAPI_AudioReaderGetDuration(self._handle)
        self._num_samples_per_channel = _CAPI_AudioReaderGetNumSamplesPerChannel(self._handle)
        self._num_channels = _CAPI_AudioReaderGetNumChannels(self._handle)
        # NOTE(review): this records the *requested* rate; with the default
        # -1 the actual stream rate is left unchanged but not stored here --
        # confirm callers always pass a real rate before relying on it.
        self.sample_rate = sample_rate
        self._num_padding = None  # lazily fetched by __get_num_padding()

    def __len__(self):
        """Get length of the audio. The length refers to the shape's first dimension.
        In this case, the length is the number of channels.

        Returns
        -------
        int
            The number of channels in the audio track.
        """
        return self.shape[0]

    def __del__(self):
        # Release the underlying C reader if construction got far enough.
        if self._handle:
            _CAPI_AudioReaderFree(self._handle)

    def __getitem__(self, idx):
        """Get sample at `idx`. idx is the index of resampled audio, unit is sample.

        Parameters
        ----------
        idx : int or slice
            The sample index, can be negative which means it will index backwards,
            or slice of sample indices.

        Returns
        -------
        ndarray
            Samples of shape CxS,
            where C is the number of channels, S is the number of samples of the index or slice.
        """
        assert self._handle is not None
        if isinstance(idx, slice):
            return self.get_batch(range(*idx.indices(self._num_samples_per_channel)))
        if idx < 0:
            idx += self._num_samples_per_channel
        if idx >= self._num_samples_per_channel or idx < 0:
            raise IndexError("Index: {} out of bound: {}".format(idx, self._num_samples_per_channel))
        return bridge_out(_nd.array(self._array[:, idx]))

    def get_batch(self, indices):
        """Get entire batch of samples.

        Parameters
        ----------
        indices : list of integers
            A list of frame indices. If negative indices detected, the indices will be indexed from backward
        Returns
        -------
        ndarray
            Samples of shape CxS,
            where C is the number of channels, S is the number of samples of the slice.

        """
        assert self._handle is not None
        indices = self._validate_indices(indices)
        indices = list(indices)
        return bridge_out(_nd.array(self._array[:, indices]))

    @property
    def shape(self):
        """Get shape of the entire audio samples.

        Returns
        -------
        (int, int)
            The number of channels, and the number of samples in each channel.

        """
        return (self._num_channels, self._num_samples_per_channel)

    def duration(self):
        """Get duration of the audio.

        Returns
        -------
        double
            Duration of the audio in secs.

        """
        return self._duration

    def __get_num_padding(self):
        """Get number of samples needed to pad the audio to start at time 0."""
        if self._num_padding is None:
            self._num_padding = _CAPI_AudioReaderGetNumPaddingSamples(self._handle)
        return self._num_padding

    def add_padding(self):
        """Pad the audio samples so that it starts at time 0.

        Returns
        -------
        int
            Number of samples padded

        """
        num_padding = self.__get_num_padding()
        self._array = np.pad(self._array, ((0, 0), (num_padding, 0)), 'constant', constant_values=0)
        # BUG FIX: converting a sample count to seconds requires *dividing*
        # by the sample rate; the original multiplied, inflating _duration.
        # NOTE(review): assumes a positive sample_rate -- with the default
        # -1 this adjustment is meaningless; confirm callers pass a real rate.
        self._duration += num_padding / self.sample_rate
        return num_padding

    def get_info(self):
        """Log out the basic info about the audio stream."""
        _CAPI_AudioReaderGetInfo(self._handle)

    def _time_to_sample(self, timestamp):
        """Convert time in seconds to sample index"""
        return math.ceil(timestamp * self.sample_rate)

    def _times_to_samples(self, timestamps):
        """Convert times in seconds to sample indices"""
        return [self._time_to_sample(timestamp) for timestamp in timestamps]

    def _validate_indices(self, indices):
        """Validate int64 integers and convert negative integers to positive by backward search"""
        assert self._handle is not None
        indices = np.array(indices, dtype=np.int64)
        # process negative indices
        indices[indices < 0] += self._num_samples_per_channel
        if not (indices >= 0).all():
            raise IndexError(
                'Invalid negative indices: {}'.format(indices[indices < 0] + self._num_samples_per_channel))
        if not (indices < self._num_samples_per_channel).all():
            raise IndexError('Out of bound indices: {}'.format(indices[indices >= self._num_samples_per_channel]))
        return indices
|
| 181 |
+
|
| 182 |
+
# Module bootstrap: presumably binds the _CAPI_AudioReader* functions used
# above into this module's namespace -- see _ffi.function._init_api.
_init_api("decord.audio_reader")
|
parrot/lib/python3.10/site-packages/decord/av_reader.py
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""AV Reader."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
import ctypes
|
| 5 |
+
import numpy as np
|
| 6 |
+
import math
|
| 7 |
+
from .video_reader import VideoReader
|
| 8 |
+
from .audio_reader import AudioReader
|
| 9 |
+
|
| 10 |
+
from .ndarray import cpu, gpu
|
| 11 |
+
from . import ndarray as _nd
|
| 12 |
+
from .bridge import bridge_out
|
| 13 |
+
|
| 14 |
+
class AVReader(object):
    """Individual audio video reader with convenient indexing function.

    Parameters
    ----------
    uri: str
        Path of file.
    ctx: decord.Context
        The context to decode the file, can be decord.cpu() or decord.gpu().
    sample_rate: int, default is -1
        Desired output sample rate of the audio, unchanged if `-1` is specified.
    mono: bool, default is True
        Desired output channel layout of the audio. `True` is mono layout. `False` is unchanged.
    width : int, default is -1
        Desired output width of the video, unchanged if `-1` is specified.
    height : int, default is -1
        Desired output height of the video, unchanged if `-1` is specified.
    num_threads : int, default is 0
        Number of decoding thread, auto if `0` is specified.
    fault_tol : int, default is -1
        The threshold of corrupted and recovered frames. This is to prevent silent fault
        tolerance when for example 50% frames of a video cannot be decoded and duplicate
        frames are returned. You may find the fault tolerant feature sweet in many cases,
        but not for training models. Say `N = # recovered frames`
        If `fault_tol` < 0, nothing will happen.
        If 0 < `fault_tol` < 1.0, if N > `fault_tol * len(video)`, raise `DECORDLimitReachedError`.
        If 1 < `fault_tol`, if N > `fault_tol`, raise `DECORDLimitReachedError`.

    """

    def __init__(self, uri, ctx=cpu(0), sample_rate=44100, mono=True, width=-1, height=-1, num_threads=0, fault_tol=-1):
        self.__audio_reader = AudioReader(uri, ctx, sample_rate, mono)
        # Pad audio so both streams share the same time origin (t = 0).
        self.__audio_reader.add_padding()
        if hasattr(uri, 'read'):
            # The audio reader consumed the file-like object; rewind it.
            uri.seek(0)
        self.__video_reader = VideoReader(uri, ctx, width, height, num_threads, fault_tol)

    def __len__(self):
        """Get length of the video. Note that sometimes FFMPEG reports inaccurate number of frames,
        we always follow what FFMPEG reports.
        Returns
        -------
        int
            The number of frames in the video file.
        """
        return len(self.__video_reader)

    def __getitem__(self, idx):
        """Get audio samples and video frame at `idx`.

        Parameters
        ----------
        idx : int or slice
            The frame index, can be negative which means it will index backwards,
            or slice of frame indices.

        Returns
        -------
        (ndarray/list of ndarray, ndarray)
            First element is samples of shape CxS or a list of length N containing samples of shape CxS,
            where N is the number of frames, C is the number of channels,
            S is the number of samples of the corresponding frame.

            Second element is Frame of shape HxWx3 or batch of image frames with shape NxHxWx3,
            where N is the length of the slice.
        """
        assert self.__video_reader is not None and self.__audio_reader is not None
        if isinstance(idx, slice):
            return self.get_batch(range(*idx.indices(len(self.__video_reader))))
        if idx < 0:
            idx += len(self.__video_reader)
        if idx >= len(self.__video_reader) or idx < 0:
            raise IndexError("Index: {} out of bound: {}".format(idx, len(self.__video_reader)))
        audio_start_idx, audio_end_idx = self.__video_reader.get_frame_timestamp(idx)
        audio_start_idx = self.__audio_reader._time_to_sample(audio_start_idx)
        audio_end_idx = self.__audio_reader._time_to_sample(audio_end_idx)
        return (self.__audio_reader[audio_start_idx:audio_end_idx], self.__video_reader[idx])

    def get_batch(self, indices):
        """Get entire batch of audio samples and video frames.

        Parameters
        ----------
        indices : list of integers
            A list of frame indices. If negative indices detected, the indices will be indexed from backward
        Returns
        -------
        (list of ndarray, ndarray)
            First element is a list of length N containing samples of shape CxS,
            where N is the number of frames, C is the number of channels,
            S is the number of samples of the corresponding frame.

            Second element is Frame of shape HxWx3 or batch of image frames with shape NxHxWx3,
            where N is the length of the slice.

        """
        assert self.__video_reader is not None and self.__audio_reader is not None
        indices = self._validate_indices(indices)
        audio_arr = []
        prev_video_idx = None
        prev_audio_end_idx = None
        for idx in list(indices):
            frame_start_time, frame_end_time = self.__video_reader.get_frame_timestamp(idx)
            # timestamp and sample conversion could have some error that could cause non-continuous audio
            # we detect if retrieving continuous frames and make the audio continuous
            # BUG FIX: compare against None explicitly -- frame index 0 is
            # falsy, so `if prev_video_idx` broke continuity after frame 0.
            if prev_video_idx is not None and idx == prev_video_idx + 1:
                audio_start_idx = prev_audio_end_idx
            else:
                audio_start_idx = self.__audio_reader._time_to_sample(frame_start_time)
            audio_end_idx = self.__audio_reader._time_to_sample(frame_end_time)
            audio_arr.append(self.__audio_reader[audio_start_idx:audio_end_idx])
            prev_video_idx = idx
            prev_audio_end_idx = audio_end_idx
        return (audio_arr, self.__video_reader.get_batch(indices))

    def _get_slice(self, sl):
        # BUG FIX: AudioReader.shape is a property, not a method -- calling
        # it (`shape()`) raised TypeError.
        audio_arr = np.empty(shape=(self.__audio_reader.shape[0], 0), dtype='float32')
        for idx in list(sl):
            audio_start_idx, audio_end_idx = self.__video_reader.get_frame_timestamp(idx)
            audio_start_idx = self.__audio_reader._time_to_sample(audio_start_idx)
            audio_end_idx = self.__audio_reader._time_to_sample(audio_end_idx)
            audio_arr = np.concatenate((audio_arr, self.__audio_reader[audio_start_idx:audio_end_idx].asnumpy()), axis=1)
        return (bridge_out(_nd.array(audio_arr)), self.__video_reader.get_batch(sl))

    def _validate_indices(self, indices):
        """Validate int64 integers and convert negative integers to positive by backward search"""
        assert self.__video_reader is not None and self.__audio_reader is not None
        indices = np.array(indices, dtype=np.int64)
        # process negative indices
        indices[indices < 0] += len(self.__video_reader)
        if not (indices >= 0).all():
            raise IndexError(
                'Invalid negative indices: {}'.format(indices[indices < 0] + len(self.__video_reader)))
        if not (indices < len(self.__video_reader)).all():
            raise IndexError('Out of bound indices: {}'.format(indices[indices >= len(self.__video_reader)]))
        return indices
|
parrot/lib/python3.10/site-packages/decord/base.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Module for base types and utilities."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
from ._ffi.base import DECORDError # pylint: disable=unused-import
|
| 7 |
+
from ._ffi.function import _init_internal_api
|
| 8 |
+
|
| 9 |
+
# A special symbol for selecting all nodes or edges.
|
| 10 |
+
ALL = "__ALL__"
|
| 11 |
+
|
| 12 |
+
def is_all(arg):
    """Return true if the argument is a special symbol for all nodes or edges."""
    if not isinstance(arg, str):
        return False
    return arg == ALL
|
| 15 |
+
|
| 16 |
+
def decord_warning(msg):
    """Print out warning messages."""
    # Route through the stdlib warnings machinery so users can filter/silence.
    warnings.warn(msg)
|
| 19 |
+
|
| 20 |
+
_init_internal_api()
|
parrot/lib/python3.10/site-packages/decord/bridge/__init__.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Deep Learning Framework bridges."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
import threading
|
| 4 |
+
|
| 5 |
+
from .mxnet import to_mxnet, from_mxnet
|
| 6 |
+
from .torchdl import to_torch, from_torch
|
| 7 |
+
from .tf import to_tensorflow, from_tensorflow
|
| 8 |
+
from .tvm import to_tvm, from_tvm
|
| 9 |
+
|
| 10 |
+
# Mapping: bridge name -> (bridge_out converter, bridge_in converter).
_BRIDGE_TYPES = {
    'native': (lambda x: x, lambda x: x),
    'mxnet': (to_mxnet, from_mxnet),
    'torch': (to_torch, from_torch),
    'tensorflow': (to_tensorflow, from_tensorflow),
    'tvm': (to_tvm, from_tvm),
}

# Per-thread active bridge selection; threads that never called set_bridge
# fall back to _GLOBAL_BRIDGE_TYPE on first use (see bridge_out/bridge_in).
_CURRENT_BRIDGE = threading.local()
_CURRENT_BRIDGE.type = 'native'
_GLOBAL_BRIDGE_TYPE = 'native'  # child threads will derive from the global type but not overwrite
|
| 21 |
+
|
| 22 |
+
def reset_bridge():
    """Reset the current thread's bridge (and, on the main thread, the
    global default) back to 'native'."""
    # BUG FIX: without this declaration the assignment below only created a
    # dead local, so the module-level default was never actually reset.
    global _GLOBAL_BRIDGE_TYPE
    _CURRENT_BRIDGE.type = 'native'
    if threading.current_thread().name == 'MainThread':
        _GLOBAL_BRIDGE_TYPE = 'native'
|
| 26 |
+
|
| 27 |
+
def set_bridge(new_bridge):
    """Select the array bridge for the current thread.

    Parameters
    ----------
    new_bridge : str
        One of the registered bridge names ('native', 'mxnet', 'torch',
        'tensorflow', 'tvm'). When called from the main thread, the
        process-wide default inherited by new threads is updated too.
    """
    assert isinstance(new_bridge, str), (
        "New bridge type must be str. Choices: {}".format(_BRIDGE_TYPES.keys()))
    assert new_bridge in _BRIDGE_TYPES.keys(), (
        "valid bridges: {}".format(_BRIDGE_TYPES.keys()))
    # Fix: the original declared `global _CURRENT_BRIDGE` (unneeded — only an
    # attribute is mutated) but not `_GLOBAL_BRIDGE_TYPE`, so the assignment
    # below silently bound a local and the global default never changed.
    global _GLOBAL_BRIDGE_TYPE
    _CURRENT_BRIDGE.type = new_bridge
    if threading.current_thread().name == 'MainThread':
        _GLOBAL_BRIDGE_TYPE = new_bridge
|
| 36 |
+
|
| 37 |
+
def bridge_out(native_arr):
    """Convert a native decord array into the currently selected bridge's type."""
    if not hasattr(_CURRENT_BRIDGE, 'type'):
        # First use on this thread: inherit the process-wide default.
        _CURRENT_BRIDGE.type = _GLOBAL_BRIDGE_TYPE
    converter, _ = _BRIDGE_TYPES[_CURRENT_BRIDGE.type]
    return converter(native_arr)
|
| 41 |
+
|
| 42 |
+
def bridge_in(arr):
    """Convert an array of the currently selected bridge's type back to decord."""
    if not hasattr(_CURRENT_BRIDGE, 'type'):
        # First use on this thread: inherit the process-wide default.
        _CURRENT_BRIDGE.type = _GLOBAL_BRIDGE_TYPE
    _, converter = _BRIDGE_TYPES[_CURRENT_BRIDGE.type]
    return converter(arr)
|
| 46 |
+
|
| 47 |
+
class _BridgeScope(object):
    """Context manager that temporarily switches the thread's bridge type.

    On exit the previously active bridge is restored (when it differed).
    """

    def __init__(self, bridge_type='native'):
        self._type = bridge_type
        self._prev = None

    def __enter__(self):
        if not hasattr(_CURRENT_BRIDGE, 'type'):
            # First use on this thread: inherit the process-wide default.
            _CURRENT_BRIDGE.type = _GLOBAL_BRIDGE_TYPE
        # The attribute is guaranteed to exist now; fall back to 'native'
        # defensively, mirroring the original behavior.
        self._prev = getattr(_CURRENT_BRIDGE, 'type', 'native')
        set_bridge(self._type)

    def __exit__(self, type, value, traceback):
        if self._prev != self._type:
            set_bridge(self._prev)
|
| 65 |
+
|
| 66 |
+
def use_mxnet():
    """Return a context manager that switches the bridge to MXNet within its scope."""
    return _BridgeScope('mxnet')
|
| 68 |
+
|
| 69 |
+
def use_torch():
    """Return a context manager that switches the bridge to PyTorch within its scope."""
    return _BridgeScope('torch')
|
| 71 |
+
|
| 72 |
+
def use_tensorflow():
    """Return a context manager that switches the bridge to TensorFlow within its scope."""
    return _BridgeScope('tensorflow')
|
| 74 |
+
|
| 75 |
+
def use_tvm():
    """Return a context manager that switches the bridge to TVM within its scope."""
    return _BridgeScope('tvm')
|
parrot/lib/python3.10/site-packages/decord/bridge/__pycache__/tf.cpython-310.pyc
ADDED
|
Binary file (1.3 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/bridge/__pycache__/torchdl.cpython-310.pyc
ADDED
|
Binary file (1.33 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/bridge/__pycache__/tvm.cpython-310.pyc
ADDED
|
Binary file (1.05 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/decord/bridge/mxnet.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""DECORD MXNet bridge"""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
from .._ffi._ctypes.ndarray import _from_dlpack
|
| 5 |
+
from .utils import try_import
|
| 6 |
+
|
| 7 |
+
def try_import_mxnet():
    """Try import mxnet at runtime.

    Returns
    -------
    mxnet module if found. Raise ImportError otherwise
    """
    # The custom message gives users an actionable pip install hint.
    msg = "mxnet is required, you can install by pip.\n \
    CPU: `pip install mxnet-mkl`, GPU: `pip install mxnet-cu100mkl`"
    return try_import('mxnet', msg)
|
| 17 |
+
|
| 18 |
+
def to_mxnet(decord_arr):
    """from decord to mxnet, no copy"""
    # Hand the data over via a DLPack capsule so no pixel copy takes place.
    mx = try_import_mxnet()
    return mx.nd.from_dlpack(decord_arr.to_dlpack())
|
| 22 |
+
|
| 23 |
+
def from_mxnet(mxnet_arr):
    """from mxnet to decord, no copy"""
    # Wrap the mxnet array's DLPack capsule (read view) into a decord NDArray.
    return _from_dlpack(mxnet_arr.to_dlpack_for_read())
|
parrot/lib/python3.10/site-packages/decord/bridge/tf.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""DECORD tensorflow bridge"""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
from .._ffi._ctypes.ndarray import _from_dlpack
|
| 5 |
+
|
| 6 |
+
def try_import_tfdl():
    """Try to import tensorflow dlpack at runtime.

    Returns
    -------
    module
        The ``tensorflow.experimental.dlpack`` module.
        Raises ImportError otherwise.
    """
    try:
        # fromlist forces __import__ to return the submodule rather than
        # the top-level tensorflow package.
        return __import__('tensorflow.experimental.dlpack', fromlist=[''])
    except ImportError as e:
        # Chain the original failure so the real cause stays visible.
        raise ImportError("tensorflow >= 2.2.0 is required.") from e
|
| 17 |
+
|
| 18 |
+
def to_tensorflow(decord_arr):
    """from decord to tensorflow, no copy"""
    # Import check only; the conversion below uses TF's private eager API.
    tfdl = try_import_tfdl()
    from tensorflow.python import pywrap_tfe
    from tensorflow.python.eager import context
    # An initialized eager context handle is required to adopt a foreign
    # DLPack capsule into a TF EagerTensor.
    ctx = context.context()
    ctx.ensure_initialized()
    # NOTE(review): relies on TF-internal pywrap_tfe/ctx._handle — may break
    # across TensorFlow versions; confirm against the installed TF release.
    return pywrap_tfe.TFE_FromDlpackCapsule(decord_arr.to_dlpack(), ctx._handle)
|
| 26 |
+
|
| 27 |
+
def from_tensorflow(tf_tensor):
    """from tensorflow to decord, no copy"""
    # Wrap the TF tensor's DLPack capsule into a decord NDArray.
    tfdl = try_import_tfdl()
    return _from_dlpack(tfdl.to_dlpack(tf_tensor))
|
parrot/lib/python3.10/site-packages/decord/bridge/torchdl.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""DECORD Pytorch bridge"""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
from .._ffi._ctypes.ndarray import _from_dlpack
|
| 5 |
+
|
| 6 |
+
def try_import_torch():
    """Try import torch at runtime.

    Returns
    -------
    module
        The ``torch.utils.dlpack`` module. Raises ImportError otherwise.
    """
    try:
        # fromlist forces __import__ to return the submodule rather than
        # the top-level torch package.
        return __import__('torch.utils.dlpack', fromlist=['object'])
    except ImportError as e:
        # Original code guarded `if not message: raise e`, but the message was
        # always a non-empty literal, so that branch was dead. Chain the cause
        # instead of discarding it.
        raise ImportError(
            "torch is required, you can install by pip: `pip install torch`") from e
|
| 20 |
+
|
| 21 |
+
def to_torch(decord_arr):
    """From decord to torch.
    The tensor will share the memory with the object represented in the dlpack.
    Note that each dlpack can only be consumed once."""
    dlpack = try_import_torch()
    return dlpack.from_dlpack(decord_arr.to_dlpack())
|
| 27 |
+
|
| 28 |
+
def from_torch(tensor):
    """From torch to decord.
    The dlpack shares the tensors memory.
    Note that each dlpack can only be consumed once."""
    dlpack = try_import_torch()
    return _from_dlpack(dlpack.to_dlpack(tensor))
|
parrot/lib/python3.10/site-packages/decord/bridge/tvm.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""DECORD TVM bridge"""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
from .._ffi._ctypes.ndarray import _from_dlpack
|
| 5 |
+
from .utils import try_import
|
| 6 |
+
|
| 7 |
+
def try_import_tvm():
    """Try import tvm at runtime.

    Returns
    -------
    tvm module if found. Raise ImportError otherwise
    """
    # Point users at the official install guide on failure.
    msg = "tvm is required, for installation guide, please checkout:\n \
    https://tvm.apache.org/docs/install/index.html"
    return try_import('tvm', msg)
|
| 17 |
+
|
| 18 |
+
def to_tvm(decord_arr):
    """from decord to tvm, no copy"""
    # Hand the data over via a DLPack capsule so no pixel copy takes place.
    tvm = try_import_tvm()
    return tvm.nd.from_dlpack(decord_arr.to_dlpack())
|
| 22 |
+
|
| 23 |
+
def from_tvm(tvm_arr):
    """from tvm to decord, no copy"""
    # Wrap the tvm array's DLPack capsule into a decord NDArray.
    return _from_dlpack(tvm_arr.to_dlpack())
|
parrot/lib/python3.10/site-packages/decord/bridge/utils.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Bridge utils."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
def try_import(package, message=None):
    """Try import specified package, with custom message support.

    Parameters
    ----------
    package : str
        The name of the targeting package. Note that for dotted names
        ``__import__`` returns the top-level package, not the submodule.
    message : str, default is None
        If not None, this function will raise a customized error message
        when import error is found; otherwise the original ImportError
        is re-raised unchanged.

    Returns
    -------
    module if found, raise ImportError otherwise
    """
    try:
        return __import__(package)
    except ImportError as e:
        if not message:
            raise e
        # Chain the original failure so the real cause stays visible.
        raise ImportError(message) from e
|
parrot/lib/python3.10/site-packages/decord/data/base_action.py
ADDED
|
@@ -0,0 +1,346 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Customized dataloader for general video classification tasks.
|
| 2 |
+
Code adapted from https://github.com/dmlc/gluon-cv"""
|
| 3 |
+
import os
|
| 4 |
+
from ..bridge import use_mxnet
|
| 5 |
+
from ..bridge.mxnet import try_import_mxnet
|
| 6 |
+
from ..video_reader import VideoReader
|
| 7 |
+
from ..ndarray import cpu, gpu
|
| 8 |
+
|
| 9 |
+
try_import_mxnet()
|
| 10 |
+
from mxnet import numpy as np
|
| 11 |
+
from mxnet.util import use_np
|
| 12 |
+
|
| 13 |
+
__all__ = ['VideoClsCustom']
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@use_np
class VideoClsCustom(object):
    """Load your own video classification dataset.

    Parameters
    ----------
    root : str, required.
        Path to the root folder storing the dataset.
    setting : str, required.
        A text file describing the dataset, each line per video sample.
        There are three items in each line: (1) video path; (2) video length and (3) video label.
    train : bool, default True.
        Whether to load the training or validation set.
    test_mode : bool, default False.
        Whether to perform evaluation on the test set.
        Usually there is three-crop or ten-crop evaluation strategy involved.
    name_pattern : str, default None.
        The naming pattern of the decoded video frames.
        For example, img_00012.jpg.
    video_ext : str, default 'mp4'.
        Please specify the video format accordinly.
    is_color : bool, default True.
        Whether the loaded image is color or grayscale.
    modality : str, default 'rgb'.
        Input modalities, we support only rgb video frames for now.
        Will add support for rgb difference image and optical flow image later.
    num_segments : int, default 1.
        Number of segments to evenly divide the video into clips.
        A useful technique to obtain global video-level information.
        Limin Wang, etal, Temporal Segment Networks: Towards Good Practices for Deep Action Recognition, ECCV 2016.
    num_crop : int, default 1.
        Number of crops for each image. default is 1.
        Common choices are three crops and ten crops during evaluation.
    new_length : int, default 1.
        The length of input video clip. Default is a single image, but it can be multiple video frames.
        For example, new_length=16 means we will extract a video clip of consecutive 16 frames.
    new_step : int, default 1.
        Temporal sampling rate. For example, new_step=1 means we will extract a video clip of consecutive frames.
        new_step=2 means we will extract a video clip of every other frame.
    new_width : int, default 340.
        Scale the width of loaded image to 'new_width' for later multiscale cropping and resizing.
    new_height : int, default 256.
        Scale the height of loaded image to 'new_height' for later multiscale cropping and resizing.
    target_width : int, default 224.
        Scale the width of transformed image to the same 'target_width' for batch forwarding.
    target_height : int, default 224.
        Scale the height of transformed image to the same 'target_height' for batch forwarding.
    temporal_jitter : bool, default False.
        Whether to temporally jitter if new_step > 1.
    transform : function, default None.
        A function that takes data and label and transforms them.
    slowfast : bool, default False.
        If set to True, use data loader designed for SlowFast network.
        Christoph Feichtenhofer, etal, SlowFast Networks for Video Recognition, ICCV 2019.
    slow_temporal_stride : int, default 16.
        The temporal stride for sparse sampling of video frames in slow branch of a SlowFast network.
    fast_temporal_stride : int, default 2.
        The temporal stride for sparse sampling of video frames in fast branch of a SlowFast network.
    lazy_init : bool, default False.
        If set to True, build a dataset instance without loading any dataset.
    ctx : decord.Context, default is cpu(0)
        Set the context used to load the video. Can be cpu() or gpu(xx)
    """
    def __init__(self,
                 root,
                 setting,
                 train=True,
                 test_mode=False,
                 name_pattern='img_%05d.jpg',
                 video_ext='mp4',
                 is_color=True,
                 modality='rgb',
                 num_segments=1,
                 num_crop=1,
                 new_length=1,
                 new_step=1,
                 new_width=340,
                 new_height=256,
                 target_width=224,
                 target_height=224,
                 temporal_jitter=False,
                 slowfast=False,
                 slow_temporal_stride=16,
                 fast_temporal_stride=2,
                 lazy_init=False,
                 transform=None,
                 ctx=cpu(0)):
        super(VideoClsCustom, self).__init__()
        self.root = root
        self.setting = setting
        self.train = train
        self.test_mode = test_mode
        self.is_color = is_color
        self.modality = modality
        self.num_segments = num_segments
        self.num_crop = num_crop
        self.new_height = new_height
        self.new_width = new_width
        self.new_length = new_length
        self.new_step = new_step
        # Total number of raw frames spanned by one sampled clip.
        self.skip_length = self.new_length * self.new_step
        self.target_height = target_height
        self.target_width = target_width
        self.transform = transform
        self.temporal_jitter = temporal_jitter
        self.name_pattern = name_pattern
        self.video_ext = video_ext
        self.slowfast = slowfast
        self.slow_temporal_stride = slow_temporal_stride
        self.fast_temporal_stride = fast_temporal_stride
        self.lazy_init = lazy_init
        self.ctx = ctx

        if self.slowfast:
            assert slow_temporal_stride % fast_temporal_stride == 0, 'slow_temporal_stride needs to be multiples of slow_temporal_stride, please set it accordinly.'
            assert not temporal_jitter, 'Slowfast dataloader does not support temporal jitter. Please set temporal_jitter=False.'
            assert new_step == 1, 'Slowfast dataloader only support consecutive frames reading, please set new_step=1.'

        if not self.lazy_init:
            self.clips = self._make_dataset(root, setting)
            if len(self.clips) == 0:
                raise(RuntimeError("Found 0 video clips in subfolders of: " + root + "\n"
                                   "Check your data directory (opt.data-dir)."))

    def __getitem__(self, index):
        """Load, sample, and transform one clip; return (clip_tensor, label)."""
        # Optionally a (index, ctx) tuple may be passed to override the
        # decoding context for this single item.
        if isinstance(index, tuple):
            index, ctx = index
        else:
            ctx = None
        directory, duration, target = self.clips[index]
        if '.' in directory.split('/')[-1]:
            # data in the "setting" file already have extension, e.g., demo.mp4
            video_name = directory
        else:
            # data in the "setting" file do not have extension, e.g., demo
            # So we need to provide extension (i.e., .mp4) to complete the file name.
            video_name = '{}.{}'.format(directory, self.video_ext)
        decord_vr = VideoReader(video_name, width=self.new_width, height=self.new_height, ctx=self.ctx if ctx is None else ctx)
        # Use the actual decoded frame count, not the duration from the setting file.
        duration = len(decord_vr)

        if self.train and not self.test_mode:
            segment_indices, skip_offsets = self._sample_train_indices(duration)
        elif not self.train and not self.test_mode:
            segment_indices, skip_offsets = self._sample_val_indices(duration)
        else:
            segment_indices, skip_offsets = self._sample_test_indices(duration)

        # N frames of shape H x W x C, where N = num_oversample * num_segments * new_length
        if self.slowfast:
            clip_input = self._video_TSN_decord_slowfast_loader(directory, decord_vr, duration, segment_indices, skip_offsets)
        else:
            clip_input = self._video_TSN_decord_batch_loader(directory, decord_vr, duration, segment_indices, skip_offsets)

        if self.transform is not None:
            clip_input = self.transform(clip_input)

        # Reshape the flat frame list into (clips, C, T, H, W) layout.
        if self.slowfast:
            sparse_sampels = len(clip_input) // (self.num_segments * self.num_crop)
            clip_input = np.stack(clip_input, axis=0)
            clip_input = clip_input.reshape((-1,) + (sparse_sampels, 3, self.target_height, self.target_width))
            clip_input = np.transpose(clip_input, (0, 2, 1, 3, 4))
        else:
            clip_input = np.stack(clip_input, axis=0)
            clip_input = clip_input.reshape((-1,) + (self.new_length, 3, self.target_height, self.target_width))
            clip_input = np.transpose(clip_input, (0, 2, 1, 3, 4))

        if self.new_length == 1:
            clip_input = np.squeeze(clip_input, axis=2)  # this is for 2D input case

        return clip_input.as_nd_ndarray(), target

    def __len__(self):
        """Number of video clips in the dataset."""
        return len(self.clips)

    def _find_classes(self, directory):
        """Treat each immediate subfolder of *directory* as a class name."""
        classes = [d for d in os.listdir(directory) if os.path.isdir(os.path.join(directory, d))]
        classes.sort()
        class_to_idx = {classes[i]: i for i in range(len(classes))}
        return classes, class_to_idx

    def _make_dataset(self, directory, setting):
        """Parse the setting file into a list of (clip_path, duration, label) tuples."""
        if not os.path.exists(setting):
            raise(RuntimeError("Setting file %s doesn't exist. Check opt.train-list and opt.val-list. " % (setting)))
        clips = []
        with open(setting) as split_f:
            data = split_f.readlines()
            for line in data:
                line_info = line.split()
                # line format: video_path, video_duration, video_label
                if len(line_info) < 3:
                    raise(RuntimeError('Video input format is not correct, missing one or more element. %s' % line))
                clip_path = os.path.join(directory, line_info[0])
                duration = int(line_info[1])
                target = int(line_info[2])
                item = (clip_path, duration, target)
                clips.append(item)
        return clips

    def _sample_train_indices(self, num_frames):
        """Randomly sample 1-based segment start indices plus per-step skip offsets."""
        average_duration = (num_frames - self.skip_length + 1) // self.num_segments
        if average_duration > 0:
            # One random start per evenly spaced segment.
            offsets = np.multiply(np.array(list(range(self.num_segments))),
                                  average_duration)
            offsets = offsets + np.random.randint(average_duration,
                                                  size=self.num_segments)
        elif num_frames > max(self.num_segments, self.skip_length):
            # Video too short for disjoint segments: sample sorted random starts.
            offsets = np.sort(np.random.randint(
                num_frames - self.skip_length + 1,
                size=self.num_segments))
        else:
            # Degenerate case: all segments start at frame 0.
            offsets = np.zeros((self.num_segments,))

        if self.temporal_jitter:
            skip_offsets = np.random.randint(
                self.new_step, size=self.skip_length // self.new_step)
        else:
            skip_offsets = np.zeros(
                self.skip_length // self.new_step, dtype=int)
        return offsets + 1, skip_offsets

    def _sample_val_indices(self, num_frames):
        """Deterministically sample center-aligned segment starts for validation."""
        if num_frames > self.num_segments + self.skip_length - 1:
            tick = (num_frames - self.skip_length + 1) / \
                float(self.num_segments)
            offsets = np.array([int(tick / 2.0 + tick * x)
                                for x in range(self.num_segments)])
        else:
            offsets = np.zeros((self.num_segments,))

        if self.temporal_jitter:
            skip_offsets = np.random.randint(
                self.new_step, size=self.skip_length // self.new_step)
        else:
            skip_offsets = np.zeros(
                self.skip_length // self.new_step, dtype=int)
        return offsets + 1, skip_offsets

    def _sample_test_indices(self, num_frames):
        """Deterministically sample center-aligned segment starts for testing."""
        if num_frames > self.skip_length - 1:
            tick = (num_frames - self.skip_length + 1) / \
                float(self.num_segments)
            offsets = np.array([int(tick / 2.0 + tick * x)
                                for x in range(self.num_segments)])
        else:
            offsets = np.zeros((self.num_segments,))

        if self.temporal_jitter:
            skip_offsets = np.random.randint(
                self.new_step, size=self.skip_length // self.new_step)
        else:
            skip_offsets = np.zeros(
                self.skip_length // self.new_step, dtype=int)
        return offsets + 1, skip_offsets

    def _video_TSN_decord_loader(self, directory, video_reader, duration, indices, skip_offsets):
        """Read the sampled frames one by one as mxnet ndarrays (slow path)."""
        sampled_list = []
        for seg_ind in indices:
            offset = int(seg_ind)
            for i, _ in enumerate(range(0, self.skip_length, self.new_step)):
                try:
                    if offset + skip_offsets[i] <= duration:
                        with use_mxnet():
                            vid_frame = video_reader[offset + skip_offsets[i] - 1].as_np_ndarray()
                    else:
                        # Clamp out-of-range reads to the segment start frame.
                        with use_mxnet():
                            vid_frame = video_reader[offset - 1].as_np_ndarray()
                except KeyboardInterrupt:
                    raise
                except:
                    raise RuntimeError('Error occured in reading frames from video {} of duration {}.'.format(directory, duration))
                sampled_list.append(vid_frame)
                if offset + self.new_step < duration:
                    offset += self.new_step
        return sampled_list

    def _video_TSN_decord_batch_loader(self, directory, video_reader, duration, indices, skip_offsets):
        """Collect all sampled frame ids first, then decode them in one batch."""
        sampled_list = []
        frame_id_list = []
        for seg_ind in indices:
            offset = int(seg_ind)
            for i, _ in enumerate(range(0, self.skip_length, self.new_step)):
                if offset + skip_offsets[i] <= duration:
                    frame_id = offset + skip_offsets[i] - 1
                else:
                    # Clamp out-of-range reads to the segment start frame.
                    frame_id = offset - 1
                frame_id_list.append(frame_id)
                if offset + self.new_step < duration:
                    offset += self.new_step
        try:
            with use_mxnet():
                video_data = video_reader.get_batch(frame_id_list).as_np_ndarray()
            sampled_list = [video_data[vid, :, :, :] for vid, _ in enumerate(frame_id_list)]
        except KeyboardInterrupt:
            raise
        except:
            raise RuntimeError('Error occured in reading frames {} from video {} of duration {}.'.format(frame_id_list, directory, duration))
        return sampled_list

    def _video_TSN_decord_slowfast_loader(self, directory, video_reader, duration, indices, skip_offsets):
        """Batch-decode frames for SlowFast: fast-branch ids followed by slow-branch ids per segment."""
        sampled_list = []
        frame_id_list = []
        for seg_ind in indices:
            fast_id_list = []
            slow_id_list = []
            offset = int(seg_ind)
            for i, _ in enumerate(range(0, self.skip_length, self.new_step)):
                if offset + skip_offsets[i] <= duration:
                    frame_id = offset + skip_offsets[i] - 1
                else:
                    # Clamp out-of-range reads to the segment start frame.
                    frame_id = offset - 1

                if (i + 1) % self.fast_temporal_stride == 0:
                    fast_id_list.append(frame_id)

                    if (i + 1) % self.slow_temporal_stride == 0:
                        slow_id_list.append(frame_id)

                if offset + self.new_step < duration:
                    offset += self.new_step

            fast_id_list.extend(slow_id_list)
            frame_id_list.extend(fast_id_list)
        try:
            with use_mxnet():
                video_data = video_reader.get_batch(frame_id_list).as_np_ndarray()
            sampled_list = [video_data[vid, :, :, :] for vid, _ in enumerate(frame_id_list)]
        except KeyboardInterrupt:
            raise
        except:
            raise RuntimeError('Error occured in reading frames {} from video {} of duration {}.'.format(frame_id_list, directory, duration))
        return sampled_list
|
parrot/lib/python3.10/site-packages/decord/logging.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""DECORD logging module.
|
| 2 |
+
|
| 3 |
+
You can adjust the logging level for ffmpeg.
|
| 4 |
+
"""
|
| 5 |
+
from ._ffi.function import _init_api
|
| 6 |
+
|
| 7 |
+
QUIET = -8
|
| 8 |
+
PANIC = 0
|
| 9 |
+
FATAL = 8
|
| 10 |
+
ERROR = 16
|
| 11 |
+
WARNING = 24
|
| 12 |
+
INFO = 32
|
| 13 |
+
VERBOSE = 40
|
| 14 |
+
DEBUG = 48
|
| 15 |
+
TRACE = 56
|
| 16 |
+
|
| 17 |
+
# Mimicking stdlib.
|
| 18 |
+
CRITICAL = FATAL
|
| 19 |
+
|
| 20 |
+
def set_level(lvl=ERROR):
    """Set the ffmpeg logging verbosity.

    Parameters
    ----------
    lvl : int, optional
        One of the module-level level constants (QUIET ... TRACE).
        Defaults to ERROR.
    """
    # _CAPI_SetLoggingLevel is injected into this module by _init_api below.
    _CAPI_SetLoggingLevel(lvl)
|
| 22 |
+
|
| 23 |
+
_init_api("decord.logging")
|
parrot/lib/python3.10/site-packages/decord/ndarray.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""DECORD Runtime NDArray API.
|
| 2 |
+
|
| 3 |
+
decord.ndarray provides a minimum runtime array structure to be
|
| 4 |
+
used with C++ library.
|
| 5 |
+
"""
|
| 6 |
+
# pylint: disable=invalid-name,unused-import
|
| 7 |
+
from __future__ import absolute_import as _abs
|
| 8 |
+
|
| 9 |
+
import ctypes
|
| 10 |
+
import functools
|
| 11 |
+
import operator
|
| 12 |
+
import numpy as _np
|
| 13 |
+
|
| 14 |
+
from ._ffi.ndarray import DECORDContext, DECORDType, NDArrayBase
|
| 15 |
+
from ._ffi.ndarray import context, empty, from_dlpack, numpyasarray
|
| 16 |
+
from ._ffi.ndarray import _set_class_ndarray
|
| 17 |
+
|
| 18 |
+
class NDArray(NDArrayBase):
    """Lightweight NDArray class for DECORD framework."""

    def __len__(self):
        # Total element count: product of all dimension sizes (1 for an
        # empty shape, matching reduce with an initial value of 1).
        count = 1
        for dim in self.shape:
            count *= dim
        return count
|
| 22 |
+
|
| 23 |
+
def cpu(dev_id=0):
    """Construct a CPU device

    Parameters
    ----------
    dev_id : int, optional
        The integer device id

    Returns
    -------
    ctx : DECORDContext
        The created context
    """
    # Device type code 1 denotes CPU in the DECORD context convention.
    return DECORDContext(1, dev_id)
|
| 37 |
+
|
| 38 |
+
def gpu(dev_id=0):
    """Construct a GPU device

    Parameters
    ----------
    dev_id : int, optional
        The integer device id

    Returns
    -------
    ctx : DECORDContext
        The created context
    """
    # Device type code 2 denotes GPU in the DECORD context convention.
    # (Docstring fixed: it previously said "CPU device".)
    return DECORDContext(2, dev_id)
|
| 52 |
+
|
| 53 |
+
def array(arr, ctx=cpu(0)):
    """Create an array from source arr.

    Parameters
    ----------
    arr : numpy.ndarray
        The array to be copied from

    ctx : DECORDContext, optional
        The device context to create the array

    Returns
    -------
    ret : NDArray
        The created array
    """
    # Coerce plain Python sequences/scalars to numpy first so that shape
    # and dtype are available for allocation.
    source = arr if isinstance(arr, (_np.ndarray, NDArray)) else _np.array(arr)
    return empty(source.shape, source.dtype, ctx).copyfrom(source)
|
| 72 |
+
|
| 73 |
+
def zerocopy_from_numpy(np_data):
    """Create an array that shares the given numpy data.

    Parameters
    ----------
    np_data : numpy.ndarray
        The numpy data

    Returns
    -------
    NDArray
        The array
    """
    arr, _ = numpyasarray(np_data)
    handle = ctypes.pointer(arr)
    # NOTE(review): is_view=True means the NDArray aliases np_data's buffer;
    # the caller presumably must keep np_data alive while the view is used.
    return NDArray(handle, is_view=True)
|
| 89 |
+
|
| 90 |
+
# Register NDArray as the concrete class the FFI layer instantiates.
_set_class_ndarray(NDArray)
|
parrot/lib/python3.10/site-packages/decord/video_loader.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Video Loader."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
import ctypes
|
| 5 |
+
import numpy as np
|
| 6 |
+
|
| 7 |
+
from ._ffi.base import c_array, c_str
|
| 8 |
+
from ._ffi.function import _init_api
|
| 9 |
+
from .base import DECORDError
|
| 10 |
+
from . import ndarray as _nd
|
| 11 |
+
from .ndarray import DECORDContext
|
| 12 |
+
from .bridge import bridge_out
|
| 13 |
+
|
| 14 |
+
# Opaque pointer type for the native (C-side) VideoLoader instance.
VideoLoaderHandle = ctypes.c_void_p
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class VideoLoader(object):
    """Multiple video loader with advanced shuffling and batching methods.

    Parameters
    ----------
    uris : list of str
        List of video paths.
    ctx : decord.Context or list of Context
        The context to decode the video file, can be decord.cpu() or decord.gpu().
        If ctx is a list, videos will be evenly split over many ctxs.
    shape : tuple
        Returned shape of the batch images, e.g., (2, 320, 240, 3) as (Batch, H, W, 3)
    interval : int
        Intra-batch frame interval.
    skip : int
        Inter-batch frame interval.
    shuffle : int
        Shuffling strategy. Can be
        `0`: all sequential, no seeking, following initial filename order
        `1`: random filename order, no random access for each video, very efficient
        `2`: random order
        `3`: random frame access in each video only.
    prefetch : int, default is 0
        Forwarded to the native loader; presumably the number of batches to
        prefetch in the background — confirm against the C API.

    """
    def __init__(self, uris, ctx, shape, interval, skip, shuffle, prefetch=0):
        # Set early so __del__ is safe even if construction fails below.
        self._handle = None
        assert isinstance(uris, (list, tuple))
        assert (len(uris) > 0)
        # The C API takes a single comma-separated string of paths.
        uri = ','.join([x.strip() for x in uris])
        if isinstance(ctx, DECORDContext):
            ctx = [ctx]
        for _ctx in ctx:
            assert isinstance(_ctx, DECORDContext)
        device_types = _nd.array([x.device_type for x in ctx])
        device_ids = _nd.array([x.device_id for x in ctx])
        assert isinstance(shape, (list, tuple))
        assert len(shape) == 4, "expected shape: [bs, height, width, 3], given {}".format(shape)
        self._handle = _CAPI_VideoLoaderGetVideoLoader(
            uri, device_types, device_ids, shape[0], shape[1], shape[2], shape[3], interval, skip, shuffle, prefetch)
        assert self._handle is not None
        self._len = _CAPI_VideoLoaderLength(self._handle)
        self._curr = 0

    def __del__(self):
        # Guard against TypeError at interpreter shutdown, when the CAPI
        # function object may already be torn down — same pattern as
        # VideoReader.__del__ in this package.
        try:
            if self._handle:
                _CAPI_VideoLoaderFree(self._handle)
        except TypeError:
            pass

    def __len__(self):
        """Get number of batches in each epoch.

        Returns
        -------
        int
            number of batches in each epoch.

        """
        return self._len

    def reset(self):
        """Reset loader for next epoch.

        """
        assert self._handle is not None
        self._curr = 0
        _CAPI_VideoLoaderReset(self._handle)

    def __next__(self):
        """Get the next batch.

        Returns
        -------
        ndarray, ndarray
            Frame data and corresponding indices in videos.
            Indices are [(n0, k0), (n1, k1)...] where n0 is the index of video, k0 is the index
            of frame in video n0.

        """
        assert self._handle is not None
        # Track the position in Python to avoid a CAPI HasNext call per batch.
        if self._curr >= self._len:
            raise StopIteration
        _CAPI_VideoLoaderNext(self._handle)
        data = _CAPI_VideoLoaderNextData(self._handle)
        indices = _CAPI_VideoLoaderNextIndices(self._handle)
        self._curr += 1
        return bridge_out(data), bridge_out(indices)

    def next(self):
        """Alias of __next__ for python2.

        """
        return self.__next__()

    def __iter__(self):
        # NOTE(review): iterating does not reset the cursor; call reset()
        # between epochs, or cache the iterator via `vl = iter(video_loader)`.
        assert self._handle is not None
        return self
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
_init_api("decord.video_loader")
|
parrot/lib/python3.10/site-packages/decord/video_reader.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Video Reader."""
|
| 2 |
+
from __future__ import absolute_import
|
| 3 |
+
|
| 4 |
+
import ctypes
|
| 5 |
+
import numpy as np
|
| 6 |
+
|
| 7 |
+
from ._ffi.base import c_array, c_str
|
| 8 |
+
from ._ffi.function import _init_api
|
| 9 |
+
from ._ffi.ndarray import DECORDContext
|
| 10 |
+
from .base import DECORDError
|
| 11 |
+
from . import ndarray as _nd
|
| 12 |
+
from .ndarray import cpu, gpu
|
| 13 |
+
from .bridge import bridge_out
|
| 14 |
+
|
| 15 |
+
# Opaque pointer type for the native (C-side) VideoReader instance.
VideoReaderHandle = ctypes.c_void_p
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class VideoReader(object):
|
| 19 |
+
"""Individual video reader with convenient indexing and seeking functions.
|
| 20 |
+
|
| 21 |
+
Parameters
|
| 22 |
+
----------
|
| 23 |
+
uri : str
|
| 24 |
+
Path of video file.
|
| 25 |
+
ctx : decord.Context
|
| 26 |
+
The context to decode the video file, can be decord.cpu() or decord.gpu().
|
| 27 |
+
width : int, default is -1
|
| 28 |
+
Desired output width of the video, unchanged if `-1` is specified.
|
| 29 |
+
height : int, default is -1
|
| 30 |
+
Desired output height of the video, unchanged if `-1` is specified.
|
| 31 |
+
num_threads : int, default is 0
|
| 32 |
+
Number of decoding thread, auto if `0` is specified.
|
| 33 |
+
fault_tol : int, default is -1
|
| 34 |
+
The threshold of corupted and recovered frames. This is to prevent silent fault
|
| 35 |
+
tolerance when for example 50% frames of a video cannot be decoded and duplicate
|
| 36 |
+
frames are returned. You may find the fault tolerant feature sweet in many cases,
|
| 37 |
+
but not for training models. Say `N = # recovered frames`
|
| 38 |
+
If `fault_tol` < 0, nothing will happen.
|
| 39 |
+
If 0 < `fault_tol` < 1.0, if N > `fault_tol * len(video)`, raise `DECORDLimitReachedError`.
|
| 40 |
+
If 1 < `fault_tol`, if N > `fault_tol`, raise `DECORDLimitReachedError`.
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
"""
|
| 44 |
+
def __init__(self, uri, ctx=cpu(0), width=-1, height=-1, num_threads=0, fault_tol=-1):
|
| 45 |
+
self._handle = None
|
| 46 |
+
assert isinstance(ctx, DECORDContext)
|
| 47 |
+
fault_tol = str(fault_tol)
|
| 48 |
+
if hasattr(uri, 'read'):
|
| 49 |
+
ba = bytearray(uri.read())
|
| 50 |
+
uri = '{} bytes'.format(len(ba))
|
| 51 |
+
self._handle = _CAPI_VideoReaderGetVideoReader(
|
| 52 |
+
ba, ctx.device_type, ctx.device_id, width, height, num_threads, 2, fault_tol)
|
| 53 |
+
else:
|
| 54 |
+
self._handle = _CAPI_VideoReaderGetVideoReader(
|
| 55 |
+
uri, ctx.device_type, ctx.device_id, width, height, num_threads, 0, fault_tol)
|
| 56 |
+
if self._handle is None:
|
| 57 |
+
raise RuntimeError("Error reading " + uri + "...")
|
| 58 |
+
self._num_frame = _CAPI_VideoReaderGetFrameCount(self._handle)
|
| 59 |
+
assert self._num_frame > 0, "Invalid frame count: {}".format(self._num_frame)
|
| 60 |
+
self._key_indices = None
|
| 61 |
+
self._frame_pts = None
|
| 62 |
+
self._avg_fps = None
|
| 63 |
+
|
| 64 |
+
def __del__(self):
|
| 65 |
+
try:
|
| 66 |
+
if self._handle is not None:
|
| 67 |
+
_CAPI_VideoReaderFree(self._handle)
|
| 68 |
+
except TypeError:
|
| 69 |
+
pass
|
| 70 |
+
|
| 71 |
+
def __len__(self):
|
| 72 |
+
"""Get length of the video. Note that sometimes FFMPEG reports inaccurate number of frames,
|
| 73 |
+
we always follow what FFMPEG reports.
|
| 74 |
+
|
| 75 |
+
Returns
|
| 76 |
+
-------
|
| 77 |
+
int
|
| 78 |
+
The number of frames in the video file.
|
| 79 |
+
|
| 80 |
+
"""
|
| 81 |
+
return self._num_frame
|
| 82 |
+
|
| 83 |
+
def __getitem__(self, idx):
|
| 84 |
+
"""Get frame at `idx`.
|
| 85 |
+
|
| 86 |
+
Parameters
|
| 87 |
+
----------
|
| 88 |
+
idx : int or slice
|
| 89 |
+
The frame index, can be negative which means it will index backwards,
|
| 90 |
+
or slice of frame indices.
|
| 91 |
+
|
| 92 |
+
Returns
|
| 93 |
+
-------
|
| 94 |
+
ndarray
|
| 95 |
+
Frame of shape HxWx3 or batch of image frames with shape NxHxWx3,
|
| 96 |
+
where N is the length of the slice.
|
| 97 |
+
"""
|
| 98 |
+
if isinstance(idx, slice):
|
| 99 |
+
return self.get_batch(range(*idx.indices(len(self))))
|
| 100 |
+
if idx < 0:
|
| 101 |
+
idx += self._num_frame
|
| 102 |
+
if idx >= self._num_frame or idx < 0:
|
| 103 |
+
raise IndexError("Index: {} out of bound: {}".format(idx, self._num_frame))
|
| 104 |
+
self.seek_accurate(idx)
|
| 105 |
+
return self.next()
|
| 106 |
+
|
| 107 |
+
def next(self):
|
| 108 |
+
"""Grab the next frame.
|
| 109 |
+
|
| 110 |
+
Returns
|
| 111 |
+
-------
|
| 112 |
+
ndarray
|
| 113 |
+
Frame with shape HxWx3.
|
| 114 |
+
|
| 115 |
+
"""
|
| 116 |
+
assert self._handle is not None
|
| 117 |
+
arr = _CAPI_VideoReaderNextFrame(self._handle)
|
| 118 |
+
if not arr.shape:
|
| 119 |
+
raise StopIteration()
|
| 120 |
+
return bridge_out(arr)
|
| 121 |
+
|
| 122 |
+
def _validate_indices(self, indices):
|
| 123 |
+
"""Validate int64 integers and convert negative integers to positive by backward search"""
|
| 124 |
+
assert self._handle is not None
|
| 125 |
+
indices = np.array(indices, dtype=np.int64)
|
| 126 |
+
# process negative indices
|
| 127 |
+
indices[indices < 0] += self._num_frame
|
| 128 |
+
if not (indices >= 0).all():
|
| 129 |
+
raise IndexError(
|
| 130 |
+
'Invalid negative indices: {}'.format(indices[indices < 0] + self._num_frame))
|
| 131 |
+
if not (indices < self._num_frame).all():
|
| 132 |
+
raise IndexError('Out of bound indices: {}'.format(indices[indices >= self._num_frame]))
|
| 133 |
+
return indices
|
| 134 |
+
|
| 135 |
+
def get_frame_timestamp(self, idx):
|
| 136 |
+
"""Get frame playback timestamp in unit(second).
|
| 137 |
+
|
| 138 |
+
Parameters
|
| 139 |
+
----------
|
| 140 |
+
indices: list of integers or slice
|
| 141 |
+
A list of frame indices. If negative indices detected, the indices will be indexed from backward.
|
| 142 |
+
|
| 143 |
+
Returns
|
| 144 |
+
-------
|
| 145 |
+
numpy.ndarray
|
| 146 |
+
numpy.ndarray of shape (N, 2), where N is the size of indices. The format is `(start_second, end_second)`.
|
| 147 |
+
"""
|
| 148 |
+
assert self._handle is not None
|
| 149 |
+
if isinstance(idx, slice):
|
| 150 |
+
idx = self.get_batch(range(*idx.indices(len(self))))
|
| 151 |
+
idx = self._validate_indices(idx)
|
| 152 |
+
if self._frame_pts is None:
|
| 153 |
+
self._frame_pts = _CAPI_VideoReaderGetFramePTS(self._handle).asnumpy()
|
| 154 |
+
return self._frame_pts[idx, :]
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def get_batch(self, indices):
|
| 158 |
+
"""Get entire batch of images. `get_batch` is optimized to handle seeking internally.
|
| 159 |
+
Duplicate frame indices will be optmized by copying existing frames rather than decode
|
| 160 |
+
from video again.
|
| 161 |
+
|
| 162 |
+
Parameters
|
| 163 |
+
----------
|
| 164 |
+
indices : list of integers
|
| 165 |
+
A list of frame indices. If negative indices detected, the indices will be indexed from backward
|
| 166 |
+
|
| 167 |
+
Returns
|
| 168 |
+
-------
|
| 169 |
+
ndarray
|
| 170 |
+
An entire batch of image frames with shape NxHxWx3, where N is the length of `indices`.
|
| 171 |
+
|
| 172 |
+
"""
|
| 173 |
+
assert self._handle is not None
|
| 174 |
+
indices = _nd.array(self._validate_indices(indices))
|
| 175 |
+
arr = _CAPI_VideoReaderGetBatch(self._handle, indices)
|
| 176 |
+
return bridge_out(arr)
|
| 177 |
+
|
| 178 |
+
def get_key_indices(self):
|
| 179 |
+
"""Get list of key frame indices.
|
| 180 |
+
|
| 181 |
+
Returns
|
| 182 |
+
-------
|
| 183 |
+
list
|
| 184 |
+
List of key frame indices.
|
| 185 |
+
|
| 186 |
+
"""
|
| 187 |
+
if self._key_indices is None:
|
| 188 |
+
self._key_indices = _CAPI_VideoReaderGetKeyIndices(self._handle).asnumpy().tolist()
|
| 189 |
+
return self._key_indices
|
| 190 |
+
|
| 191 |
+
def get_avg_fps(self):
|
| 192 |
+
"""Get average FPS(frame per second).
|
| 193 |
+
|
| 194 |
+
Returns
|
| 195 |
+
-------
|
| 196 |
+
float
|
| 197 |
+
Average FPS.
|
| 198 |
+
|
| 199 |
+
"""
|
| 200 |
+
if self._avg_fps is None:
|
| 201 |
+
self._avg_fps = _CAPI_VideoReaderGetAverageFPS(self._handle)
|
| 202 |
+
return self._avg_fps
|
| 203 |
+
|
| 204 |
+
def seek(self, pos):
|
| 205 |
+
"""Fast seek to frame position, this does not guarantee accurate position.
|
| 206 |
+
To obtain accurate seeking, see `accurate_seek`.
|
| 207 |
+
|
| 208 |
+
Parameters
|
| 209 |
+
----------
|
| 210 |
+
pos : integer
|
| 211 |
+
Non negative seeking position.
|
| 212 |
+
|
| 213 |
+
"""
|
| 214 |
+
assert self._handle is not None
|
| 215 |
+
assert pos >= 0 and pos < self._num_frame
|
| 216 |
+
success = _CAPI_VideoReaderSeek(self._handle, pos)
|
| 217 |
+
if not success:
|
| 218 |
+
raise RuntimeError("Failed to seek to frame {}".format(pos))
|
| 219 |
+
|
| 220 |
+
def seek_accurate(self, pos):
|
| 221 |
+
"""Accurately seek to frame position, this is slower than `seek`
|
| 222 |
+
but guarantees accurate position.
|
| 223 |
+
|
| 224 |
+
Parameters
|
| 225 |
+
----------
|
| 226 |
+
pos : integer
|
| 227 |
+
Non negative seeking position.
|
| 228 |
+
|
| 229 |
+
"""
|
| 230 |
+
assert self._handle is not None
|
| 231 |
+
assert pos >= 0 and pos < self._num_frame
|
| 232 |
+
success = _CAPI_VideoReaderSeekAccurate(self._handle, pos)
|
| 233 |
+
if not success:
|
| 234 |
+
raise RuntimeError("Failed to seek_accurate to frame {}".format(pos))
|
| 235 |
+
|
| 236 |
+
def skip_frames(self, num=1):
|
| 237 |
+
"""Skip reading multiple frames. Skipped frames will still be decoded
|
| 238 |
+
(required by following frames) but it can save image resize/copy operations.
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
Parameters
|
| 242 |
+
----------
|
| 243 |
+
num : int, default is 1
|
| 244 |
+
The number of frames to be skipped.
|
| 245 |
+
|
| 246 |
+
"""
|
| 247 |
+
assert self._handle is not None
|
| 248 |
+
assert num > 0
|
| 249 |
+
_CAPI_VideoReaderSkipFrames(self._handle, num)
|
| 250 |
+
|
| 251 |
+
_init_api("decord.video_reader")
|
parrot/lib/python3.10/site-packages/markdown_it/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (288 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/markdown_it/__pycache__/_compat.cpython-310.pyc
ADDED
|
Binary file (447 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/markdown_it/__pycache__/_punycode.cpython-310.pyc
ADDED
|
Binary file (1.67 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/markdown_it/__pycache__/main.cpython-310.pyc
ADDED
|
Binary file (11.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/markdown_it/__pycache__/parser_block.cpython-310.pyc
ADDED
|
Binary file (2.58 kB). View file
|
|
|