/* Generated by Cython 3.1.5 */ /* BEGIN: Cython Metadata { "distutils": { "depends": [], "extra_compile_args": [ "-std=c++14", "-fpermissive", "-Wno-deprecated-declarations", "-fno-var-tracking-assignments", "-O3" ], "include_dirs": [ "/opt/python/cp312-cp312/include", "/host//home/runner/_work/cuda-python/cuda-python/cuda_toolkit/include" ], "language": "c++", "library_dirs": [ "/tmp/build-env-3t3_8d9r/lib/python3.12/site-packages", "/tmp/build-env-3t3_8d9r/lib", "/host//home/runner/_work/cuda-python/cuda-python/cuda_toolkit/lib64", "/host//home/runner/_work/cuda-python/cuda-python/cuda_toolkit/lib" ], "name": "cuda.bindings._internal.utils", "sources": [ "cuda/bindings/_internal/utils.pyx" ] }, "module_name": "cuda.bindings._internal.utils" } END: Cython Metadata */ #ifndef PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN #endif /* PY_SSIZE_T_CLEAN */ /* InitLimitedAPI */ #if defined(Py_LIMITED_API) #if !defined(CYTHON_LIMITED_API) #define CYTHON_LIMITED_API 1 #endif #elif defined(CYTHON_LIMITED_API) #ifdef _MSC_VER #pragma message ("Limited API usage is enabled with 'CYTHON_LIMITED_API' but 'Py_LIMITED_API' does not define a Python target version. Consider setting 'Py_LIMITED_API' instead.") #else #warning Limited API usage is enabled with 'CYTHON_LIMITED_API' but 'Py_LIMITED_API' does not define a Python target version. Consider setting 'Py_LIMITED_API' instead. #endif #endif #include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. #elif PY_VERSION_HEX < 0x03080000 #error Cython requires Python 3.8+. 
#else #define __PYX_ABI_VERSION "3_1_5" #define CYTHON_HEX_VERSION 0x030105F0 #define CYTHON_FUTURE_DIVISION 1 /* CModulePreamble */ #include #ifndef offsetof #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif #if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif #define __PYX_COMMA , #ifndef HAVE_LONG_LONG #define HAVE_LONG_LONG #endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif #ifndef Py_HUGE_VAL #define Py_HUGE_VAL HUGE_VAL #endif #define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX #if defined(GRAALVM_PYTHON) /* For very preliminary testing purposes. Most variables are set the same as PyPy. The existence of this section does not imply that anything works or is even tested */ #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 #define CYTHON_COMPILING_IN_GRAAL 1 #define CYTHON_COMPILING_IN_CPYTHON_FREETHREADING 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_TYPE_SPECS #define CYTHON_USE_TYPE_SPECS 0 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS #define CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_ASSUME_SAFE_SIZE #define CYTHON_ASSUME_SAFE_SIZE 0 #undef CYTHON_UNPACK_METHODS #define 
CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_GIL #define CYTHON_FAST_GIL 0 #undef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS 1 #endif #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #undef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 0 #undef CYTHON_USE_SYS_MONITORING #define CYTHON_USE_SYS_MONITORING 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #undef CYTHON_USE_AM_SEND #define CYTHON_USE_AM_SEND 0 #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 1 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif #undef CYTHON_USE_FREELISTS #define CYTHON_USE_FREELISTS 0 #elif defined(PYPY_VERSION) #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 #define CYTHON_COMPILING_IN_GRAAL 0 #define CYTHON_COMPILING_IN_CPYTHON_FREETHREADING 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #ifndef CYTHON_USE_TYPE_SPECS #define CYTHON_USE_TYPE_SPECS 0 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS #define CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #ifndef CYTHON_ASSUME_SAFE_SIZE #define CYTHON_ASSUME_SAFE_SIZE 1 #endif #undef 
CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_GIL #define CYTHON_FAST_GIL 0 #undef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS 1 #endif #if PY_VERSION_HEX < 0x03090000 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif #undef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 0 #undef CYTHON_USE_SYS_MONITORING #define CYTHON_USE_SYS_MONITORING 0 #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE (PYPY_VERSION_NUM >= 0x07030C00) #endif #undef CYTHON_USE_AM_SEND #define CYTHON_USE_AM_SEND 0 #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_NUM >= 0x07031100) #endif #undef CYTHON_USE_FREELISTS #define CYTHON_USE_FREELISTS 0 #elif defined(CYTHON_LIMITED_API) #ifdef Py_LIMITED_API #undef __PYX_LIMITED_VERSION_HEX #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API #endif #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 1 #define CYTHON_COMPILING_IN_GRAAL 0 #define CYTHON_COMPILING_IN_CPYTHON_FREETHREADING 0 #undef CYTHON_CLINE_IN_TRACEBACK #define CYTHON_CLINE_IN_TRACEBACK 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_TYPE_SPECS #define CYTHON_USE_TYPE_SPECS 1 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 
#endif #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS #define CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS 0 #endif #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_ASSUME_SAFE_SIZE #define CYTHON_ASSUME_SAFE_SIZE 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_GIL #define CYTHON_FAST_GIL 0 #undef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL (__PYX_LIMITED_VERSION_HEX >= 0x030C0000) #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS 1 #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif #ifndef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 0 #endif #undef CYTHON_USE_SYS_MONITORING #define CYTHON_USE_SYS_MONITORING 0 #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #endif #ifndef CYTHON_USE_AM_SEND #define CYTHON_USE_AM_SEND (__PYX_LIMITED_VERSION_HEX >= 0x030A0000) #endif #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif #undef CYTHON_USE_FREELISTS #define CYTHON_USE_FREELISTS 0 #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 1 #define CYTHON_COMPILING_IN_LIMITED_API 0 #define CYTHON_COMPILING_IN_GRAAL 0 #ifdef Py_GIL_DISABLED #define CYTHON_COMPILING_IN_CPYTHON_FREETHREADING 1 #else #define CYTHON_COMPILING_IN_CPYTHON_FREETHREADING 0 #endif #if PY_VERSION_HEX < 0x030A0000 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #elif !defined(CYTHON_USE_TYPE_SLOTS) #define CYTHON_USE_TYPE_SLOTS 1 #endif #ifndef CYTHON_USE_TYPE_SPECS #define 
CYTHON_USE_TYPE_SPECS 0 #endif #ifndef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 1 #endif #ifndef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 1 #endif #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #elif !defined(CYTHON_USE_PYLIST_INTERNALS) #define CYTHON_USE_PYLIST_INTERNALS 1 #endif #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING || PY_VERSION_HEX >= 0x030B00A2 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) #define CYTHON_USE_UNICODE_WRITER 1 #endif #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING #undef CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS #define CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS 1 #elif !defined(CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS) #define CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_ASSUME_SAFE_SIZE #define CYTHON_ASSUME_SAFE_SIZE 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #ifndef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 1 #endif #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING #undef CYTHON_FAST_GIL #define CYTHON_FAST_GIL 0 #elif !defined(CYTHON_FAST_GIL) #define CYTHON_FAST_GIL (PY_VERSION_HEX < 0x030C00A6) #endif #ifndef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL 1 #endif #ifndef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 1 #endif #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS 1 #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif #ifndef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 0 #endif #ifndef CYTHON_USE_SYS_MONITORING #define CYTHON_USE_SYS_MONITORING (PY_VERSION_HEX >= 
0x030d00B1) #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 1 #endif #ifndef CYTHON_USE_AM_SEND #define CYTHON_USE_AM_SEND 1 #endif #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #elif !defined(CYTHON_USE_DICT_VERSIONS) #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5 && !CYTHON_USE_MODULE_STATE) #endif #ifndef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 1 #endif #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 #endif #ifndef CYTHON_USE_FREELISTS #define CYTHON_USE_FREELISTS (!CYTHON_COMPILING_IN_CPYTHON_FREETHREADING) #endif #endif #ifndef CYTHON_FAST_PYCCALL #define CYTHON_FAST_PYCCALL CYTHON_FAST_PYCALL #endif #ifndef CYTHON_VECTORCALL #if CYTHON_COMPILING_IN_LIMITED_API #define CYTHON_VECTORCALL (__PYX_LIMITED_VERSION_HEX >= 0x030C0000) #else #define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) #endif #endif #define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) #if CYTHON_USE_PYLONG_INTERNALS #undef SHIFT #undef BASE #undef MASK #ifdef SIZEOF_VOID_P enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; #endif #endif #ifndef CYTHON_LOCK_AND_GIL_DEADLOCK_AVOIDANCE_TIME #define CYTHON_LOCK_AND_GIL_DEADLOCK_AVOIDANCE_TIME 100 #endif #ifndef __has_attribute #define __has_attribute(x) 0 #endif #ifndef __has_cpp_attribute #define __has_cpp_attribute(x) 0 #endif #ifndef CYTHON_RESTRICT #if defined(__GNUC__) #define CYTHON_RESTRICT __restrict__ #elif defined(_MSC_VER) && _MSC_VER >= 1400 #define CYTHON_RESTRICT __restrict #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_RESTRICT restrict #else #define CYTHON_RESTRICT #endif #endif #ifndef CYTHON_UNUSED #if defined(__cplusplus) /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 * but leads to warnings with -pedantic, since it is a C++17 feature 
*/ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) #if __has_cpp_attribute(maybe_unused) #define CYTHON_UNUSED [[maybe_unused]] #endif #endif #endif #endif #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif #endif #ifndef CYTHON_UNUSED_VAR # if defined(__cplusplus) template void CYTHON_UNUSED_VAR( const T& ) { } # else # define CYTHON_UNUSED_VAR(x) (void)(x) # endif #endif #ifndef CYTHON_MAYBE_UNUSED_VAR #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) #endif #ifndef CYTHON_NCP_UNUSED # if CYTHON_COMPILING_IN_CPYTHON && !CYTHON_COMPILING_IN_CPYTHON_FREETHREADING # define CYTHON_NCP_UNUSED # else # define CYTHON_NCP_UNUSED CYTHON_UNUSED # endif #endif #ifndef CYTHON_USE_CPP_STD_MOVE #if defined(__cplusplus) && (\ __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) #define CYTHON_USE_CPP_STD_MOVE 1 #else #define CYTHON_USE_CPP_STD_MOVE 0 #endif #endif #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; #else typedef unsigned __int8 uint8_t; typedef unsigned __int16 uint16_t; typedef unsigned __int32 uint32_t; #endif #endif #if _MSC_VER < 1300 #ifdef _WIN64 typedef unsigned long long __pyx_uintptr_t; #else typedef unsigned int __pyx_uintptr_t; #endif #else #ifdef _WIN64 typedef unsigned __int64 __pyx_uintptr_t; #else typedef unsigned __int32 __pyx_uintptr_t; #endif #endif #else #include typedef uintptr_t __pyx_uintptr_t; #endif #ifndef CYTHON_FALLTHROUGH #if defined(__cplusplus) /* for clang 
__has_cpp_attribute(fallthrough) is true even before C++17 * but leads to warnings with -pedantic, since it is a C++17 feature */ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) #if __has_cpp_attribute(fallthrough) #define CYTHON_FALLTHROUGH [[fallthrough]] #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_cpp_attribute(clang::fallthrough) #define CYTHON_FALLTHROUGH [[clang::fallthrough]] #elif __has_cpp_attribute(gnu::fallthrough) #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] #endif #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_attribute(fallthrough) #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) #else #define CYTHON_FALLTHROUGH #endif #endif #if defined(__clang__) && defined(__apple_build_version__) #if __apple_build_version__ < 7000000 #undef CYTHON_FALLTHROUGH #define CYTHON_FALLTHROUGH #endif #endif #endif #ifndef Py_UNREACHABLE #define Py_UNREACHABLE() assert(0); abort() #endif #ifdef __cplusplus template struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) #else #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) #endif #if CYTHON_COMPILING_IN_PYPY == 1 #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) #else #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) #endif #define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) /* CppInitCode */ #ifndef __cplusplus #error "Cython files generated with the C++ option must be compiled with a C++ compiler." 
#endif #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #else #define CYTHON_INLINE inline #endif #endif template void __Pyx_call_destructor(T& x) { x.~T(); } template class __Pyx_FakeReference { public: __Pyx_FakeReference() : ptr(NULL) { } __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } T *operator->() { return ptr; } T *operator&() { return ptr; } operator T&() { return *ptr; } template bool operator ==(const U& other) const { return *ptr == other; } template bool operator !=(const U& other) const { return *ptr != other; } template bool operator==(const __Pyx_FakeReference& other) const { return *ptr == *other.ptr; } template bool operator!=(const __Pyx_FakeReference& other) const { return *ptr != *other.ptr; } private: T *ptr; }; /* PythonCompatibility */ #define __PYX_BUILD_PY_SSIZE_T "n" #define CYTHON_FORMAT_SSIZE_T "z" #define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_DefaultClassType PyType_Type #if CYTHON_COMPILING_IN_LIMITED_API #ifndef CO_OPTIMIZED static int CO_OPTIMIZED; #endif #ifndef CO_NEWLOCALS static int CO_NEWLOCALS; #endif #ifndef CO_VARARGS static int CO_VARARGS; #endif #ifndef CO_VARKEYWORDS static int CO_VARKEYWORDS; #endif #ifndef CO_ASYNC_GENERATOR static int CO_ASYNC_GENERATOR; #endif #ifndef CO_GENERATOR static int CO_GENERATOR; #endif #ifndef CO_COROUTINE static int CO_COROUTINE; #endif #else #ifndef CO_COROUTINE #define CO_COROUTINE 0x80 #endif #ifndef CO_ASYNC_GENERATOR #define CO_ASYNC_GENERATOR 0x200 #endif #endif static int __Pyx_init_co_variables(void); #if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) #else #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) #define __Pyx_Py_Is(x, y) Py_Is(x, y) #else #define __Pyx_Py_Is(x, y) ((x) == (y)) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) #define 
__Pyx_Py_IsNone(ob) Py_IsNone(ob) #else #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) #else #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) #else #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) #endif #define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) #if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) #else #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) #endif #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 #endif #ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif #ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif #ifndef Py_TPFLAGS_HAVE_FINALIZE #define Py_TPFLAGS_HAVE_FINALIZE 0 #endif #ifndef Py_TPFLAGS_SEQUENCE #define Py_TPFLAGS_SEQUENCE 0 #endif #ifndef Py_TPFLAGS_MAPPING #define Py_TPFLAGS_MAPPING 0 #endif #ifndef METH_STACKLESS #define METH_STACKLESS 0 #endif #ifndef METH_FASTCALL #ifndef METH_FASTCALL #define METH_FASTCALL 0x80 #endif typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames); #else #if PY_VERSION_HEX >= 0x030d00A4 # define __Pyx_PyCFunctionFast PyCFunctionFast # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords #else # define __Pyx_PyCFunctionFast _PyCFunctionFast # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords #endif #endif #if CYTHON_METH_FASTCALL #define __Pyx_METH_FASTCALL METH_FASTCALL #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords 
#else #define __Pyx_METH_FASTCALL METH_VARARGS #define __Pyx_PyCFunction_FastCall PyCFunction #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords #endif #if CYTHON_VECTORCALL #define __pyx_vectorcallfunc vectorcallfunc #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) #elif CYTHON_BACKPORT_VECTORCALL typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, size_t nargsf, PyObject *kwnames); #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) #else #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) #endif #if PY_VERSION_HEX >= 0x030900B1 #define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) #else #define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) #endif #define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) #elif !CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) #endif #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? 
NULL : ((PyCFunctionObject*)func)->m_self; } #endif static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void (*cfunc)(void)) { #if CYTHON_COMPILING_IN_LIMITED_API return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; #else return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; #endif } #define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) #if __PYX_LIMITED_VERSION_HEX < 0x03090000 #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); #else #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) #define __Pyx_PyCMethod PyCMethod #endif #ifndef METH_METHOD #define METH_METHOD 0x200 #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) #define PyObject_Malloc(s) PyMem_Malloc(s) #define PyObject_Free(p) PyMem_Free(p) #define PyObject_Realloc(p) PyMem_Realloc(p) #endif #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_PyFrame_SetLineNumber(frame, lineno) #elif CYTHON_COMPILING_IN_GRAAL #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) _PyFrame_SetLineNumber((frame), (lineno)) #else #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) #endif #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_PyThreadState_Current PyThreadState_Get() #elif !CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_Current PyThreadState_GET() #elif PY_VERSION_HEX >= 0x030d00A1 #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() #else #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() #endif #if CYTHON_USE_MODULE_STATE static CYTHON_INLINE void *__Pyx__PyModule_GetState(PyObject *op) { void *result; result = PyModule_GetState(op); if 
(!result) Py_FatalError("Couldn't find the module state"); return result; } #define __Pyx_PyModule_GetState(o) (__pyx_mstatetype *)__Pyx__PyModule_GetState(o) #else #define __Pyx_PyModule_GetState(op) ((void)op,__pyx_mstate_global) #endif #define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE((PyObject *) obj), name, func_ctype) #define __Pyx_PyObject_TryGetSlot(obj, name, func_ctype) __Pyx_PyType_TryGetSlot(Py_TYPE(obj), name, func_ctype) #define __Pyx_PyObject_GetSubSlot(obj, sub, name, func_ctype) __Pyx_PyType_GetSubSlot(Py_TYPE(obj), sub, name, func_ctype) #define __Pyx_PyObject_TryGetSubSlot(obj, sub, name, func_ctype) __Pyx_PyType_TryGetSubSlot(Py_TYPE(obj), sub, name, func_ctype) #if CYTHON_USE_TYPE_SLOTS #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) #define __Pyx_PyType_TryGetSlot(type, name, func_ctype) __Pyx_PyType_GetSlot(type, name, func_ctype) #define __Pyx_PyType_GetSubSlot(type, sub, name, func_ctype) (((type)->sub) ? ((type)->sub->name) : NULL) #define __Pyx_PyType_TryGetSubSlot(type, sub, name, func_ctype) __Pyx_PyType_GetSubSlot(type, sub, name, func_ctype) #else #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) #define __Pyx_PyType_TryGetSlot(type, name, func_ctype)\ ((__PYX_LIMITED_VERSION_HEX >= 0x030A0000 ||\ (PyType_GetFlags(type) & Py_TPFLAGS_HEAPTYPE) || __Pyx_get_runtime_version() >= 0x030A0000) ?\ __Pyx_PyType_GetSlot(type, name, func_ctype) : NULL) #define __Pyx_PyType_GetSubSlot(obj, sub, name, func_ctype) __Pyx_PyType_GetSlot(obj, name, func_ctype) #define __Pyx_PyType_TryGetSubSlot(obj, sub, name, func_ctype) __Pyx_PyType_TryGetSlot(obj, name, func_ctype) #endif #if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) #else #define __Pyx_PyDict_NewPresized(n) PyDict_New() #endif #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) #if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_UNICODE_INTERNALS #define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); if (res == NULL) PyErr_Clear(); return res; } #elif !CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000 #define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError #define __Pyx_PyDict_GetItemStr PyDict_GetItem #else static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { #if CYTHON_COMPILING_IN_PYPY return PyDict_GetItem(dict, name); #else PyDictEntry *ep; PyDictObject *mp = (PyDictObject*) dict; long hash = ((PyStringObject *) name)->ob_shash; assert(hash != -1); ep = (mp->ma_lookup)(mp, name, hash); if (ep == NULL) { return NULL; } return ep->me_value; #endif } #define __Pyx_PyDict_GetItemStr PyDict_GetItem #endif #if CYTHON_USE_TYPE_SLOTS #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) #else #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) #endif #define __Pyx_PyObject_GetIterNextFunc(iterator) __Pyx_PyObject_GetSlot(iterator, tp_iternext, iternextfunc) #if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 #define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ PyTypeObject *type = Py_TYPE((PyObject*)obj);\ assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ PyObject_GC_Del(obj);\ Py_DECREF(type);\ } #else #define 
__Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) #endif #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) #define __Pyx_PyUnicode_DATA(u) ((void*)u) #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) #else #if PY_VERSION_HEX >= 0x030C0000 #define __Pyx_PyUnicode_READY(op) (0) #else #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ 0 : _PyUnicode_Ready((PyObject *)(op))) #endif #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) #if PY_VERSION_HEX >= 0x030C0000 #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) #else #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) #else #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? 
PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) #endif #endif #endif #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) #else #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) #endif #if CYTHON_COMPILING_IN_PYPY #if !defined(PyUnicode_DecodeUnicodeEscape) #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) #endif #if !defined(PyUnicode_Contains) #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) #endif #if !defined(PyByteArray_Check) #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) #endif #if !defined(PyObject_Format) #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) #endif #endif #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_PySequence_ListKeepNew(obj)\ (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? 
__Pyx_NewRef(obj) : PySequence_List(obj)) #else #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) #endif #if PY_VERSION_HEX >= 0x030900A4 #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) #else #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) #endif #if CYTHON_AVOID_BORROWED_REFS || CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS #if __PYX_LIMITED_VERSION_HEX >= 0x030d0000 #define __Pyx_PyList_GetItemRef(o, i) PyList_GetItemRef(o, i) #elif CYTHON_COMPILING_IN_LIMITED_API || !CYTHON_ASSUME_SAFE_MACROS #define __Pyx_PyList_GetItemRef(o, i) (likely((i) >= 0) ? PySequence_GetItem(o, i) : (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) #else #define __Pyx_PyList_GetItemRef(o, i) PySequence_ITEM(o, i) #endif #elif CYTHON_COMPILING_IN_LIMITED_API || !CYTHON_ASSUME_SAFE_MACROS #if __PYX_LIMITED_VERSION_HEX >= 0x030d0000 #define __Pyx_PyList_GetItemRef(o, i) PyList_GetItemRef(o, i) #else #define __Pyx_PyList_GetItemRef(o, i) __Pyx_XNewRef(PyList_GetItem(o, i)) #endif #else #define __Pyx_PyList_GetItemRef(o, i) __Pyx_NewRef(PyList_GET_ITEM(o, i)) #endif #if __PYX_LIMITED_VERSION_HEX >= 0x030d0000 #define __Pyx_PyDict_GetItemRef(dict, key, result) PyDict_GetItemRef(dict, key, result) #elif CYTHON_AVOID_BORROWED_REFS || CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS static CYTHON_INLINE int __Pyx_PyDict_GetItemRef(PyObject *dict, PyObject *key, PyObject **result) { *result = PyObject_GetItem(dict, key); if (*result == NULL) { if (PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); return 0; } return -1; } return 1; } #else static CYTHON_INLINE int __Pyx_PyDict_GetItemRef(PyObject *dict, PyObject *key, PyObject **result) { *result = PyDict_GetItemWithError(dict, key); if (*result == NULL) { return 
PyErr_Occurred() ? -1 : 0; } Py_INCREF(*result); return 1; } #endif #if defined(CYTHON_DEBUG_VISIT_CONST) && CYTHON_DEBUG_VISIT_CONST #define __Pyx_VISIT_CONST(obj) Py_VISIT(obj) #else #define __Pyx_VISIT_CONST(obj) #endif #if CYTHON_ASSUME_SAFE_MACROS #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) #define __Pyx_PyTuple_GET_ITEM(o, i) PyTuple_GET_ITEM(o, i) #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) #define __Pyx_PyList_GET_ITEM(o, i) PyList_GET_ITEM(o, i) #else #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) #define __Pyx_PyTuple_GET_ITEM(o, i) PyTuple_GetItem(o, i) #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) #define __Pyx_PyList_GET_ITEM(o, i) PyList_GetItem(o, i) #endif #if CYTHON_ASSUME_SAFE_SIZE #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) #define __Pyx_PyUnicode_GET_LENGTH(o) PyUnicode_GET_LENGTH(o) #else #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) #define __Pyx_PyUnicode_GET_LENGTH(o) PyUnicode_GetLength(o) #endif #if __PYX_LIMITED_VERSION_HEX >= 0x030d0000 #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) #else static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { PyObject *module = PyImport_AddModule(name); Py_XINCREF(module); return module; } #endif #if CYTHON_COMPILING_IN_PYPY 
&& !defined(PyUnicode_InternFromString) #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) #endif #define __Pyx_PyLong_FromHash_t PyLong_FromSsize_t #define __Pyx_PyLong_AsHash_t __Pyx_PyIndex_AsSsize_t #if __PYX_LIMITED_VERSION_HEX >= 0x030A0000 #define __Pyx_PySendResult PySendResult #else typedef enum { PYGEN_RETURN = 0, PYGEN_ERROR = -1, PYGEN_NEXT = 1, } __Pyx_PySendResult; #endif #if CYTHON_COMPILING_IN_LIMITED_API || PY_VERSION_HEX < 0x030A00A3 typedef __Pyx_PySendResult (*__Pyx_pyiter_sendfunc)(PyObject *iter, PyObject *value, PyObject **result); #else #define __Pyx_pyiter_sendfunc sendfunc #endif #if !CYTHON_USE_AM_SEND #define __PYX_HAS_PY_AM_SEND 0 #elif __PYX_LIMITED_VERSION_HEX >= 0x030A0000 #define __PYX_HAS_PY_AM_SEND 1 #else #define __PYX_HAS_PY_AM_SEND 2 // our own backported implementation #endif #if __PYX_HAS_PY_AM_SEND < 2 #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods #else typedef struct { unaryfunc am_await; unaryfunc am_aiter; unaryfunc am_anext; __Pyx_pyiter_sendfunc am_send; } __Pyx_PyAsyncMethodsStruct; #define __Pyx_SlotTpAsAsync(s) ((PyAsyncMethods*)(s)) #endif #if CYTHON_USE_AM_SEND && PY_VERSION_HEX < 0x030A00F0 #define __Pyx_TPFLAGS_HAVE_AM_SEND (1UL << 21) #else #define __Pyx_TPFLAGS_HAVE_AM_SEND (0) #endif #if PY_VERSION_HEX >= 0x03090000 #define __Pyx_PyInterpreterState_Get() PyInterpreterState_Get() #else #define __Pyx_PyInterpreterState_Get() PyThreadState_Get()->interp #endif #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030A0000 #ifdef __cplusplus extern "C" #endif PyAPI_FUNC(void *) PyMem_Calloc(size_t nelem, size_t elsize); #endif #if CYTHON_COMPILING_IN_LIMITED_API static int __Pyx_init_co_variable(PyObject *inspect, const char* name, int *write_to) { int value; PyObject *py_value = PyObject_GetAttrString(inspect, name); if (!py_value) return 0; value = (int) PyLong_AsLong(py_value); Py_DECREF(py_value); *write_to = value; return value != -1 || !PyErr_Occurred(); } static int 
__Pyx_init_co_variables(void) { PyObject *inspect; int result; inspect = PyImport_ImportModule("inspect"); result = #if !defined(CO_OPTIMIZED) __Pyx_init_co_variable(inspect, "CO_OPTIMIZED", &CO_OPTIMIZED) && #endif #if !defined(CO_NEWLOCALS) __Pyx_init_co_variable(inspect, "CO_NEWLOCALS", &CO_NEWLOCALS) && #endif #if !defined(CO_VARARGS) __Pyx_init_co_variable(inspect, "CO_VARARGS", &CO_VARARGS) && #endif #if !defined(CO_VARKEYWORDS) __Pyx_init_co_variable(inspect, "CO_VARKEYWORDS", &CO_VARKEYWORDS) && #endif #if !defined(CO_ASYNC_GENERATOR) __Pyx_init_co_variable(inspect, "CO_ASYNC_GENERATOR", &CO_ASYNC_GENERATOR) && #endif #if !defined(CO_GENERATOR) __Pyx_init_co_variable(inspect, "CO_GENERATOR", &CO_GENERATOR) && #endif #if !defined(CO_COROUTINE) __Pyx_init_co_variable(inspect, "CO_COROUTINE", &CO_COROUTINE) && #endif 1; Py_DECREF(inspect); return result ? 0 : -1; } #else static int __Pyx_init_co_variables(void) { return 0; // It's a limited API-only feature } #endif /* MathInitCode */ #if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) #ifndef _USE_MATH_DEFINES #define _USE_MATH_DEFINES #endif #endif #include #ifdef NAN #define __PYX_NAN() ((float) NAN) #else static CYTHON_INLINE float __PYX_NAN() { float value; memset(&value, 0xFF, sizeof(value)); return value; } #endif #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) #define __Pyx_truncl trunc #else #define __Pyx_truncl truncl #endif #ifndef CYTHON_CLINE_IN_TRACEBACK_RUNTIME #define CYTHON_CLINE_IN_TRACEBACK_RUNTIME 0 #endif #ifndef CYTHON_CLINE_IN_TRACEBACK #define CYTHON_CLINE_IN_TRACEBACK CYTHON_CLINE_IN_TRACEBACK_RUNTIME #endif #if CYTHON_CLINE_IN_TRACEBACK #define __PYX_MARK_ERR_POS(f_index, lineno) { __pyx_filename = __pyx_f[f_index]; (void) __pyx_filename; __pyx_lineno = lineno; (void) __pyx_lineno; __pyx_clineno = __LINE__; (void) __pyx_clineno; } #else #define __PYX_MARK_ERR_POS(f_index, lineno) { __pyx_filename = __pyx_f[f_index]; (void) __pyx_filename; __pyx_lineno = lineno; (void) 
__pyx_lineno; (void) __pyx_clineno; } #endif #define __PYX_ERR(f_index, lineno, Ln_error) \ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } #ifdef CYTHON_EXTERN_C #undef __PYX_EXTERN_C #define __PYX_EXTERN_C CYTHON_EXTERN_C #elif defined(__PYX_EXTERN_C) #ifdef _MSC_VER #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") #else #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. #endif #else #define __PYX_EXTERN_C extern "C++" #endif #define __PYX_HAVE__cuda__bindings___internal__utils #define __PYX_HAVE_API__cuda__bindings___internal__utils /* Early includes */ #include #include "ios" #include "new" #include "stdexcept" #include "typeinfo" #include #include template class nullable_unique_ptr { public: nullable_unique_ptr() noexcept = default; nullable_unique_ptr(std::nullptr_t) noexcept = delete; explicit nullable_unique_ptr(T* data, bool own_data): own_data_(own_data) { if (own_data) manager_.reset(data); else raw_data_ = data; } nullable_unique_ptr(const nullable_unique_ptr&) = delete; nullable_unique_ptr& operator=(const nullable_unique_ptr&) = delete; nullable_unique_ptr(nullable_unique_ptr&& other) noexcept { own_data_ = other.own_data_; other.own_data_ = false; // ownership is transferred if (own_data_) { manager_ = std::move(other.manager_); raw_data_ = nullptr; // just in case } else { manager_.reset(nullptr); // just in case raw_data_ = other.raw_data_; } } nullable_unique_ptr& operator=(nullable_unique_ptr&& other) noexcept { own_data_ = other.own_data_; other.own_data_ = false; // ownership is transferred if (own_data_) { manager_ = std::move(other.manager_); raw_data_ = nullptr; // just in case } else { manager_.reset(nullptr); // just in case raw_data_ = other.raw_data_; } return *this; } ~nullable_unique_ptr() = default; void reset(T* data, bool own_data) { own_data_ = own_data; if (own_data_) { manager_.reset(data); raw_data_ = nullptr; } 
else { manager_.reset(nullptr); raw_data_ = data; } } void swap(nullable_unique_ptr& other) noexcept { std::swap(manager_, other.manager_); std::swap(raw_data_, other.raw_data_); std::swap(own_data_, other.own_data_); } /* * Get the pointer to the underlying object (this is different from data()!). */ T* get() const noexcept { if (own_data_) return manager_.get(); else return raw_data_; } /* * Get the pointer to the underlying buffer (this is different from get()!). */ void* data() noexcept { if (own_data_) return manager_.get()->data(); else return raw_data_; } T& operator*() { if (own_data_) return *manager_; else return *raw_data_; } private: std::unique_ptr manager_{}; T* raw_data_{nullptr}; bool own_data_{false}; }; #include #include #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyFloat_FromString(obj) PyFloat_FromString(obj) #else #define __Pyx_PyFloat_FromString(obj) PyFloat_FromString(obj, NULL) #endif #include #if PY_MAJOR_VERSION <= 2 #define PyDict_GetItemWithError _PyDict_GetItemWithError #endif #if PY_VERSION_HEX < 0x030d0000 static CYTHON_INLINE int __Pyx_PyWeakref_GetRef(PyObject *ref, PyObject **pobj) { PyObject *obj = PyWeakref_GetObject(ref); if (obj == NULL) { // SystemError if ref is NULL *pobj = NULL; return -1; } if (obj == Py_None) { *pobj = NULL; return 0; } Py_INCREF(obj); *pobj = obj; return 1; } #else #define __Pyx_PyWeakref_GetRef PyWeakref_GetRef #endif #include "pythread.h" #if (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030600) && !defined(PyContextVar_Get) #define PyContextVar_Get(var, d, v) ((d) ? 
((void)(var), Py_INCREF(d), (v)[0] = (d), 0) : ((v)[0] = NULL, 0) ) #endif #include #if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600) // move should be defined for these versions of MSVC, but __cplusplus isn't set usefully #include namespace cython_std { template typename std::remove_reference::type&& move(T& t) noexcept { return std::move(t); } template typename std::remove_reference::type&& move(T&& t) noexcept { return std::move(t); } } #endif #ifdef _OPENMP #include #endif /* _OPENMP */ #if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif #define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 #define __PYX_DEFAULT_STRING_ENCODING "" #define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString #define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #define __Pyx_uchar_cast(c) ((unsigned char)c) #define __Pyx_long_cast(x) ((long)x) #define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ (sizeof(type) < sizeof(Py_ssize_t)) ||\ (sizeof(type) > sizeof(Py_ssize_t) &&\ likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX) &&\ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ v == (type)PY_SSIZE_T_MIN))) ||\ (sizeof(type) == sizeof(Py_ssize_t) &&\ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX))) ) static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { return (size_t) i < (size_t) limit; } #if defined (__cplusplus) && __cplusplus >= 201103L #include #define __Pyx_sst_abs(value) std::abs(value) #elif SIZEOF_INT >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) abs(value) #elif SIZEOF_LONG >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) labs(value) #elif defined (_MSC_VER) #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define __Pyx_sst_abs(value) llabs(value) #elif defined (__GNUC__) #define 
__Pyx_sst_abs(value) __builtin_llabs(value) #else #define __Pyx_sst_abs(value) ((value<0) ? -value : value) #endif static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); #define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) #define __Pyx_PyBytes_FromString PyBytes_FromString #define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #if CYTHON_ASSUME_SAFE_MACROS #define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyByteArray_AsString(s) PyByteArray_AS_STRING(s) #else #define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AsString(s)) #define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AsString(s)) #define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AsString(s)) #define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AsString(s)) #define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AsString(s)) #define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AsString(s)) #define __Pyx_PyByteArray_AsString(s) PyByteArray_AsString(s) #endif #define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) 
__Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) #define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) #define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode static CYTHON_INLINE PyObject *__Pyx_NewRef(PyObject *obj) { #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030a0000 || defined(Py_NewRef) return Py_NewRef(obj); #else Py_INCREF(obj); return obj; #endif } static CYTHON_INLINE PyObject *__Pyx_XNewRef(PyObject *obj) { #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030a0000 || defined(Py_XNewRef) return Py_XNewRef(obj); #else Py_XINCREF(obj); return obj; #endif } static CYTHON_INLINE PyObject *__Pyx_Owned_Py_None(int b); static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_Long(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyLong_FromSize_t(size_t); static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #if CYTHON_ASSUME_SAFE_MACROS #define __Pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #define __Pyx_PyFloat_AS_DOUBLE(x) PyFloat_AS_DOUBLE(x) #else #define __Pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) #define __Pyx_PyFloat_AS_DOUBLE(x) PyFloat_AsDouble(x) #endif #define __Pyx_PyFloat_AsFloat(x) ((float) __Pyx_PyFloat_AsDouble(x)) #define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) #if CYTHON_USE_PYLONG_INTERNALS #if PY_VERSION_HEX >= 0x030C00A7 #ifndef _PyLong_SIGN_MASK #define _PyLong_SIGN_MASK 3 #endif #ifndef _PyLong_NON_SIZE_BITS #define _PyLong_NON_SIZE_BITS 3 #endif #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) #define __Pyx_PyLong_SignedDigitCount(x)\ ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) #else #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) #endif typedef Py_ssize_t __Pyx_compact_pylong; typedef size_t __Pyx_compact_upylong; #else #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) #define 
__Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) #define __Pyx_PyLong_CompactValue(x)\ ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? -(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) typedef sdigit __Pyx_compact_pylong; typedef digit __Pyx_compact_upylong; #endif static CYTHON_INLINE int __Pyx_PyLong_CompactAsLong(PyObject *x, long *return_value); #if PY_VERSION_HEX >= 0x030C00A5 #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) #else #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) #endif #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) #elif __PYX_DEFAULT_STRING_ENCODING_IS_ASCII #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeASCII(c_str, size, NULL) #else #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) #endif /* Test for GCC > 2.95 */ #if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ /* PretendToInitialize */ #ifdef __cplusplus #if __cplusplus > 201103L #include #endif template static void __Pyx_pretend_to_initialize(T* ptr) { #if __cplusplus > 201103L if ((std::is_trivially_default_constructible::value)) #endif *ptr = T(); (void)ptr; } #else static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } #endif #if !CYTHON_USE_MODULE_STATE static PyObject *__pyx_m = NULL; #endif static int __pyx_lineno; static int 
__pyx_clineno = 0; static const char * const __pyx_cfilenm = __FILE__; static const char *__pyx_filename; /* #### Code section: filename_table ### */ static const char* const __pyx_f[] = { "cuda/bindings/_internal/utils.pyx", "", "cpython/contextvars.pxd", "cpython/type.pxd", "cpython/bool.pxd", "cpython/complex.pxd", }; /* #### Code section: utility_code_proto_before_types ### */ /* CriticalSections.proto */ #if !CYTHON_COMPILING_IN_CPYTHON_FREETHREADING #define __Pyx_PyCriticalSection void* #define __Pyx_PyCriticalSection2 void* #define __Pyx_PyCriticalSection_Begin1(cs, arg) (void)cs #define __Pyx_PyCriticalSection_Begin2(cs, arg1, arg2) (void)cs #define __Pyx_PyCriticalSection_End1(cs) #define __Pyx_PyCriticalSection_End2(cs) #else #define __Pyx_PyCriticalSection PyCriticalSection #define __Pyx_PyCriticalSection2 PyCriticalSection2 #define __Pyx_PyCriticalSection_Begin1 PyCriticalSection_Begin #define __Pyx_PyCriticalSection_Begin2 PyCriticalSection2_Begin #define __Pyx_PyCriticalSection_End1 PyCriticalSection_End #define __Pyx_PyCriticalSection_End2 PyCriticalSection2_End #endif #if PY_VERSION_HEX < 0x030d0000 || CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_BEGIN_CRITICAL_SECTION(o) { #define __Pyx_END_CRITICAL_SECTION() } #else #define __Pyx_BEGIN_CRITICAL_SECTION Py_BEGIN_CRITICAL_SECTION #define __Pyx_END_CRITICAL_SECTION Py_END_CRITICAL_SECTION #endif /* Atomics.proto */ #include #ifndef CYTHON_ATOMICS #define CYTHON_ATOMICS 1 #endif #define __PYX_CYTHON_ATOMICS_ENABLED() CYTHON_ATOMICS #define __PYX_GET_CYTHON_COMPILING_IN_CPYTHON_FREETHREADING() CYTHON_COMPILING_IN_CPYTHON_FREETHREADING #define __pyx_atomic_int_type int #define __pyx_nonatomic_int_type int #if CYTHON_ATOMICS && (defined(__STDC_VERSION__) &&\ (__STDC_VERSION__ >= 201112L) &&\ !defined(__STDC_NO_ATOMICS__)) #include #elif CYTHON_ATOMICS && (defined(__cplusplus) && (\ (__cplusplus >= 201103L) ||\ (defined(_MSC_VER) && _MSC_VER >= 1700))) #include #endif #if CYTHON_ATOMICS && 
(defined(__STDC_VERSION__) &&\ (__STDC_VERSION__ >= 201112L) &&\ !defined(__STDC_NO_ATOMICS__) &&\ ATOMIC_INT_LOCK_FREE == 2) #undef __pyx_atomic_int_type #define __pyx_atomic_int_type atomic_int #define __pyx_atomic_ptr_type atomic_uintptr_t #define __pyx_nonatomic_ptr_type uintptr_t #define __pyx_atomic_incr_relaxed(value) atomic_fetch_add_explicit(value, 1, memory_order_relaxed) #define __pyx_atomic_incr_acq_rel(value) atomic_fetch_add_explicit(value, 1, memory_order_acq_rel) #define __pyx_atomic_decr_acq_rel(value) atomic_fetch_sub_explicit(value, 1, memory_order_acq_rel) #define __pyx_atomic_sub(value, arg) atomic_fetch_sub(value, arg) #define __pyx_atomic_int_cmp_exchange(value, expected, desired) atomic_compare_exchange_strong(value, expected, desired) #define __pyx_atomic_load(value) atomic_load(value) #define __pyx_atomic_store(value, new_value) atomic_store(value, new_value) #define __pyx_atomic_pointer_load_relaxed(value) atomic_load_explicit(value, memory_order_relaxed) #define __pyx_atomic_pointer_load_acquire(value) atomic_load_explicit(value, memory_order_acquire) #define __pyx_atomic_pointer_exchange(value, new_value) atomic_exchange(value, (__pyx_nonatomic_ptr_type)new_value) #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER) #pragma message ("Using standard C atomics") #elif defined(__PYX_DEBUG_ATOMICS) #warning "Using standard C atomics" #endif #elif CYTHON_ATOMICS && (defined(__cplusplus) && (\ (__cplusplus >= 201103L) ||\ \ (defined(_MSC_VER) && _MSC_VER >= 1700)) &&\ ATOMIC_INT_LOCK_FREE == 2) #undef __pyx_atomic_int_type #define __pyx_atomic_int_type std::atomic_int #define __pyx_atomic_ptr_type std::atomic_uintptr_t #define __pyx_nonatomic_ptr_type uintptr_t #define __pyx_atomic_incr_relaxed(value) std::atomic_fetch_add_explicit(value, 1, std::memory_order_relaxed) #define __pyx_atomic_incr_acq_rel(value) std::atomic_fetch_add_explicit(value, 1, std::memory_order_acq_rel) #define __pyx_atomic_decr_acq_rel(value) 
std::atomic_fetch_sub_explicit(value, 1, std::memory_order_acq_rel) #define __pyx_atomic_sub(value, arg) std::atomic_fetch_sub(value, arg) #define __pyx_atomic_int_cmp_exchange(value, expected, desired) std::atomic_compare_exchange_strong(value, expected, desired) #define __pyx_atomic_load(value) std::atomic_load(value) #define __pyx_atomic_store(value, new_value) std::atomic_store(value, new_value) #define __pyx_atomic_pointer_load_relaxed(value) std::atomic_load_explicit(value, std::memory_order_relaxed) #define __pyx_atomic_pointer_load_acquire(value) std::atomic_load_explicit(value, std::memory_order_acquire) #define __pyx_atomic_pointer_exchange(value, new_value) std::atomic_exchange(value, (__pyx_nonatomic_ptr_type)new_value) #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER) #pragma message ("Using standard C++ atomics") #elif defined(__PYX_DEBUG_ATOMICS) #warning "Using standard C++ atomics" #endif #elif CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 &&\ (__GNUC_MINOR__ > 1 ||\ (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2)))) #define __pyx_atomic_ptr_type void* #define __pyx_atomic_incr_relaxed(value) __sync_fetch_and_add(value, 1) #define __pyx_atomic_incr_acq_rel(value) __sync_fetch_and_add(value, 1) #define __pyx_atomic_decr_acq_rel(value) __sync_fetch_and_sub(value, 1) #define __pyx_atomic_sub(value, arg) __sync_fetch_and_sub(value, arg) static CYTHON_INLINE int __pyx_atomic_int_cmp_exchange(__pyx_atomic_int_type* value, __pyx_nonatomic_int_type* expected, __pyx_nonatomic_int_type desired) { __pyx_nonatomic_int_type old = __sync_val_compare_and_swap(value, *expected, desired); int result = old == *expected; *expected = old; return result; } #define __pyx_atomic_load(value) __sync_fetch_and_add(value, 0) #define __pyx_atomic_store(value, new_value) __sync_lock_test_and_set(value, new_value) #define __pyx_atomic_pointer_load_relaxed(value) __sync_fetch_and_add(value, 0) #define __pyx_atomic_pointer_load_acquire(value) 
__sync_fetch_and_add(value, 0) #define __pyx_atomic_pointer_exchange(value, new_value) __sync_lock_test_and_set(value, (__pyx_atomic_ptr_type)new_value) #ifdef __PYX_DEBUG_ATOMICS #warning "Using GNU atomics" #endif #elif CYTHON_ATOMICS && defined(_MSC_VER) #include #undef __pyx_atomic_int_type #define __pyx_atomic_int_type long #define __pyx_atomic_ptr_type void* #undef __pyx_nonatomic_int_type #define __pyx_nonatomic_int_type long #pragma intrinsic (_InterlockedExchangeAdd, _InterlockedExchange, _InterlockedCompareExchange, _InterlockedCompareExchangePointer, _InterlockedExchangePointer) #define __pyx_atomic_incr_relaxed(value) _InterlockedExchangeAdd(value, 1) #define __pyx_atomic_incr_acq_rel(value) _InterlockedExchangeAdd(value, 1) #define __pyx_atomic_decr_acq_rel(value) _InterlockedExchangeAdd(value, -1) #define __pyx_atomic_sub(value, arg) _InterlockedExchangeAdd(value, -arg) static CYTHON_INLINE int __pyx_atomic_int_cmp_exchange(__pyx_atomic_int_type* value, __pyx_nonatomic_int_type* expected, __pyx_nonatomic_int_type desired) { __pyx_nonatomic_int_type old = _InterlockedCompareExchange(value, desired, *expected); int result = old == *expected; *expected = old; return result; } #define __pyx_atomic_load(value) _InterlockedExchangeAdd(value, 0) #define __pyx_atomic_store(value, new_value) _InterlockedExchange(value, new_value) #define __pyx_atomic_pointer_load_relaxed(value) *(void * volatile *)value #define __pyx_atomic_pointer_load_acquire(value) _InterlockedCompareExchangePointer(value, 0, 0) #define __pyx_atomic_pointer_exchange(value, new_value) _InterlockedExchangePointer(value, (__pyx_atomic_ptr_type)new_value) #ifdef __PYX_DEBUG_ATOMICS #pragma message ("Using MSVC atomics") #endif #else #undef CYTHON_ATOMICS #define CYTHON_ATOMICS 0 #ifdef __PYX_DEBUG_ATOMICS #warning "Not using atomics" #endif #endif #if CYTHON_ATOMICS #define __pyx_add_acquisition_count(memview)\ __pyx_atomic_incr_relaxed(__pyx_get_slice_count_pointer(memview)) #define 
__pyx_sub_acquisition_count(memview)\ __pyx_atomic_decr_acq_rel(__pyx_get_slice_count_pointer(memview)) #else #define __pyx_add_acquisition_count(memview)\ __pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock) #define __pyx_sub_acquisition_count(memview)\ __pyx_sub_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock) #endif /* #### Code section: numeric_typedefs ### */ /* #### Code section: complex_type_declarations ### */ /* #### Code section: type_declarations ### */ /*--- Type declarations ---*/ struct __pyx_opt_args_7cpython_11contextvars_get_value; struct __pyx_opt_args_7cpython_11contextvars_get_value_no_default; /* "cpython/contextvars.pxd":116 * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline object get_value(var, default_value=None): # <<<<<<<<<<<<<< * """Return a new reference to the value of the context variable, * or the default value of the context variable, */ struct __pyx_opt_args_7cpython_11contextvars_get_value { int __pyx_n; PyObject *default_value; }; /* "cpython/contextvars.pxd":134 * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline object get_value_no_default(var, default_value=None): # <<<<<<<<<<<<<< * """Return a new reference to the value of the context variable, * or the provided default value if no such value was found. 
*/ struct __pyx_opt_args_7cpython_11contextvars_get_value_no_default { int __pyx_n; PyObject *default_value; }; template struct __pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource; struct __pyx_opt_args_4cuda_8bindings_9_internal_5utils_get_buffer_pointer; /* "cuda/bindings/_internal/utils.pxd":156 * * * cdef cppclass nested_resource[T]: # <<<<<<<<<<<<<< * nullable_unique_ptr[ vector[intptr_t] ] ptrs * nullable_unique_ptr[ vector[vector[T]] ] nested_resource_ptr */ template struct __pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource { nullable_unique_ptr > ptrs; nullable_unique_ptr > > nested_resource_ptr; }; /* "cuda/bindings/_internal/utils.pxd":167 * * cdef bint is_nested_sequence(data) * cdef void* get_buffer_pointer(buf, Py_ssize_t size, readonly=*) except* # <<<<<<<<<<<<<< */ struct __pyx_opt_args_4cuda_8bindings_9_internal_5utils_get_buffer_pointer { int __pyx_n; PyObject *readonly; }; /* #### Code section: utility_code_proto ### */ /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, Py_ssize_t); void (*DECREF)(void*, PyObject*, Py_ssize_t); void (*GOTREF)(void*, PyObject*, Py_ssize_t); void (*GIVEREF)(void*, PyObject*, Py_ssize_t); void* (*SetupContext)(const char*, Py_ssize_t, const char*); void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; #define __Pyx_RefNannySetupContext(name, acquire_gil)\ if (acquire_gil) {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ PyGILState_Release(__pyx_gilstate_save);\ } else {\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ } #define 
__Pyx_RefNannyFinishContextNogil() {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __Pyx_RefNannyFinishContext();\ PyGILState_Release(__pyx_gilstate_save);\ } #define __Pyx_RefNannyFinishContextNogil() {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __Pyx_RefNannyFinishContext();\ PyGILState_Release(__pyx_gilstate_save);\ } #define __Pyx_RefNannyFinishContext()\ __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContextNogil() #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) #define __Pyx_GOTREF(r) #define __Pyx_GIVEREF(r) #define __Pyx_XINCREF(r) Py_XINCREF(r) #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) #endif #define __Pyx_Py_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; Py_XDECREF(tmp);\ } while (0) #define __Pyx_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_XDECREF(tmp);\ } while (0) #define __Pyx_DECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_DECREF(tmp);\ } while (0) #define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; 
__Pyx_DECREF(tmp);} while(0) #define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) /* PyErrExceptionMatches.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); #else #define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) #endif /* PyThreadStateGet.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; #define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; #if PY_VERSION_HEX >= 0x030C00A6 #define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) #define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? (PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) #else #define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) #define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) #endif #else #define __Pyx_PyThreadState_declare #define __Pyx_PyThreadState_assign #define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) #define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() #endif /* PyErrFetchRestore.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) #define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) #define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); static CYTHON_INLINE void 
__Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 #define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) #else #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #endif #else #define __Pyx_PyErr_Clear() PyErr_Clear() #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) #endif /* PyObjectGetAttrStr.proto */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); #else #define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) #endif /* PyObjectGetAttrStrNoError.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); /* AssertionsEnabled.proto */ #if CYTHON_COMPILING_IN_LIMITED_API || (CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030C0000) static int __pyx_assertions_enabled_flag; #define __pyx_assertions_enabled() (__pyx_assertions_enabled_flag) static int __Pyx_init_assertions_enabled(void) { PyObject *builtins, *debug, *debug_str; int flag; builtins = PyEval_GetBuiltins(); if (!builtins) goto bad; debug_str = PyUnicode_FromStringAndSize("__debug__", 9); if (!debug_str) goto bad; debug = PyObject_GetItem(builtins, debug_str); Py_DECREF(debug_str); if (!debug) goto bad; flag = PyObject_IsTrue(debug); Py_DECREF(debug); if (flag == -1) goto bad; 
__pyx_assertions_enabled_flag = flag; return 0; bad: __pyx_assertions_enabled_flag = 1; return -1; } #else #define __Pyx_init_assertions_enabled() (0) #define __pyx_assertions_enabled() (!Py_OptimizeFlag) #endif /* RaiseException.proto */ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /* GetTopmostException.proto */ #if CYTHON_USE_EXC_INFO_STACK && CYTHON_FAST_THREAD_STATE static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); #endif /* SaveResetException.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); #else #define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) #define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) #endif /* GetException.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #else static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); #endif /* PyUnicode_Unicode.proto */ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj); /* IncludeStringH.proto */ #include /* JoinPyUnicode.proto */ static PyObject* __Pyx_PyUnicode_Join(PyObject** values, Py_ssize_t value_count, Py_ssize_t result_ulength, Py_UCS4 max_char); /* PyFunctionFastCall.proto */ #if CYTHON_FAST_PYCALL #if !CYTHON_VECTORCALL #define __Pyx_PyFunction_FastCall(func, args, nargs)\ __Pyx_PyFunction_FastCallDict((func), (args), (nargs), 
NULL) static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject *const *args, Py_ssize_t nargs, PyObject *kwargs); #endif #define __Pyx_BUILD_ASSERT_EXPR(cond)\ (sizeof(char [1 - 2*!(cond)]) - 1) #ifndef Py_MEMBER_SIZE #define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) #endif #if !CYTHON_VECTORCALL #if PY_VERSION_HEX >= 0x03080000 #include "frameobject.h" #define __Pxy_PyFrame_Initialize_Offsets() #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) #else static size_t __pyx_pyframe_localsplus_offset = 0; #include "frameobject.h" #define __Pxy_PyFrame_Initialize_Offsets()\ ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) #define __Pyx_PyFrame_GetLocalsplus(frame)\ (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) #endif #endif #endif /* PyObjectCall.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); #else #define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) #endif /* PyObjectCallMethO.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); #endif /* PyObjectFastCall.proto */ #define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject * const*args, size_t nargs, PyObject *kwargs); /* SwapException.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, 
PyObject **tb); #else static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); #endif /* GetItemInt.proto */ #define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck, has_gil)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ __Pyx_GetItemInt_Generic(o, to_py_func(i)))) #define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck, has_gil)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, int wraparound, int boundscheck); #define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck, has_gil)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, int wraparound, int boundscheck); static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, int wraparound, int boundscheck); /* RaiseUnexpectedTypeError.proto */ static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); /* TypeImport.proto */ #ifndef __PYX_HAVE_RT_ImportType_proto_3_1_5 #define __PYX_HAVE_RT_ImportType_proto_3_1_5 #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L #include #endif #if (defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) || __cplusplus >= 201103L #define 
__PYX_GET_STRUCT_ALIGNMENT_3_1_5(s) alignof(s) #else #define __PYX_GET_STRUCT_ALIGNMENT_3_1_5(s) sizeof(void*) #endif enum __Pyx_ImportType_CheckSize_3_1_5 { __Pyx_ImportType_CheckSize_Error_3_1_5 = 0, __Pyx_ImportType_CheckSize_Warn_3_1_5 = 1, __Pyx_ImportType_CheckSize_Ignore_3_1_5 = 2 }; static PyTypeObject *__Pyx_ImportType_3_1_5(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_1_5 check_size); #endif /* PyObjectCallOneArg.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); /* Py3UpdateBases.proto */ static PyObject* __Pyx_PEP560_update_bases(PyObject *bases); /* CalculateMetaclass.proto */ static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases); /* PyObjectCall2Args.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); /* PyObjectLookupSpecial.proto */ #if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS #define __Pyx_PyObject_LookupSpecialNoError(obj, attr_name) __Pyx__PyObject_LookupSpecial(obj, attr_name, 0) #define __Pyx_PyObject_LookupSpecial(obj, attr_name) __Pyx__PyObject_LookupSpecial(obj, attr_name, 1) static CYTHON_INLINE PyObject* __Pyx__PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name, int with_error); #else #define __Pyx_PyObject_LookupSpecialNoError(o,n) __Pyx_PyObject_GetAttrStrNoError(o,n) #define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n) #endif /* Py3ClassCreate.proto */ static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc); static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass); /* PyDictVersioning.proto */ #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS #define 
__PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) #define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) #define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ (version_var) = __PYX_GET_DICT_VERSION(dict);\ (cache_var) = (value); #define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ static PY_UINT64_T __pyx_dict_version = 0;\ static PyObject *__pyx_dict_cached_value = NULL;\ if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ (VAR) = __pyx_dict_cached_value;\ } else {\ (VAR) = __pyx_dict_cached_value = (LOOKUP);\ __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ }\ } static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); #else #define __PYX_GET_DICT_VERSION(dict) (0) #define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) #define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); #endif /* CLineInTraceback.proto */ #if CYTHON_CLINE_IN_TRACEBACK && CYTHON_CLINE_IN_TRACEBACK_RUNTIME static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); #else #define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) #endif /* CodeObjectCache.proto */ #if CYTHON_COMPILING_IN_LIMITED_API typedef PyObject __Pyx_CachedCodeObjectType; #else typedef PyCodeObject __Pyx_CachedCodeObjectType; #endif typedef struct { __Pyx_CachedCodeObjectType* code_object; int code_line; } __Pyx_CodeObjectCacheEntry; struct __Pyx_CodeObjectCache { int count; int max_count; __Pyx_CodeObjectCacheEntry* entries; #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING __pyx_atomic_int_type accessor_count; #endif }; static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); static __Pyx_CachedCodeObjectType *__pyx_find_code_object(int code_line); static void __pyx_insert_code_object(int code_line, __Pyx_CachedCodeObjectType* code_object); /* AddTraceback.proto */ static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename); /* CppExceptionConversion.proto */ #ifndef __Pyx_CppExn2PyErr #include #include #include #include static void __Pyx_CppExn2PyErr() { try { if (PyErr_Occurred()) ; // let the latest Python exn pass through and ignore the current one else throw; } catch (const std::bad_alloc& exn) { PyErr_SetString(PyExc_MemoryError, exn.what()); } catch (const std::bad_cast& exn) { PyErr_SetString(PyExc_TypeError, exn.what()); } catch (const std::bad_typeid& exn) { PyErr_SetString(PyExc_TypeError, exn.what()); } catch (const std::domain_error& exn) { PyErr_SetString(PyExc_ValueError, exn.what()); } catch (const std::invalid_argument& exn) { PyErr_SetString(PyExc_ValueError, exn.what()); } catch (const std::ios_base::failure& exn) { PyErr_SetString(PyExc_IOError, exn.what()); } catch (const std::out_of_range& exn) { PyErr_SetString(PyExc_IndexError, exn.what()); } catch (const std::overflow_error& exn) { PyErr_SetString(PyExc_OverflowError, exn.what()); } catch (const std::range_error& exn) { PyErr_SetString(PyExc_ArithmeticError, exn.what()); } catch (const std::underflow_error& exn) { PyErr_SetString(PyExc_ArithmeticError, 
exn.what()); } catch (const std::exception& exn) { PyErr_SetString(PyExc_RuntimeError, exn.what()); } catch (...) { PyErr_SetString(PyExc_RuntimeError, "Unknown exception"); } } #endif /* GCCDiagnostics.proto */ #if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) #define __Pyx_HAS_GCC_DIAGNOSTIC #endif /* LengthHint.proto */ #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_PyObject_LengthHint(o, defaultval) (defaultval) #else #define __Pyx_PyObject_LengthHint(o, defaultval) PyObject_LengthHint(o, defaultval) #endif /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyLong_As_int(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE int32_t __Pyx_PyLong_As_int32_t(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE int64_t __Pyx_PyLong_As_int64_t(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE char __Pyx_PyLong_As_char(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE size_t __Pyx_PyLong_As_size_t(PyObject *); /* FormatTypeName.proto */ #if CYTHON_COMPILING_IN_LIMITED_API typedef PyObject *__Pyx_TypeName; #define __Pyx_FMT_TYPENAME "%U" #define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) #if __PYX_LIMITED_VERSION_HEX >= 0x030d0000 #define __Pyx_PyType_GetFullyQualifiedName PyType_GetFullyQualifiedName #else static __Pyx_TypeName __Pyx_PyType_GetFullyQualifiedName(PyTypeObject* tp); #endif #else // !LIMITED_API typedef const char *__Pyx_TypeName; #define __Pyx_FMT_TYPENAME "%.200s" #define __Pyx_PyType_GetFullyQualifiedName(tp) ((tp)->tp_name) #define __Pyx_DECREF_TypeName(obj) #endif /* PyObjectVectorCallKwBuilder.proto */ CYTHON_UNUSED static int __Pyx_VectorcallBuilder_AddArg_Check(PyObject *key, PyObject *value, PyObject *builder, PyObject **args, int n); #if CYTHON_VECTORCALL #if PY_VERSION_HEX >= 0x03090000 #define __Pyx_Object_Vectorcall_CallFromBuilder PyObject_Vectorcall #else #define __Pyx_Object_Vectorcall_CallFromBuilder _PyObject_Vectorcall #endif #define 
__Pyx_MakeVectorcallBuilderKwds(n) PyTuple_New(n) static int __Pyx_VectorcallBuilder_AddArg(PyObject *key, PyObject *value, PyObject *builder, PyObject **args, int n); static int __Pyx_VectorcallBuilder_AddArgStr(const char *key, PyObject *value, PyObject *builder, PyObject **args, int n); #else #define __Pyx_Object_Vectorcall_CallFromBuilder __Pyx_PyObject_FastCallDict #define __Pyx_MakeVectorcallBuilderKwds(n) __Pyx_PyDict_NewPresized(n) #define __Pyx_VectorcallBuilder_AddArg(key, value, builder, args, n) PyDict_SetItem(builder, key, value) #define __Pyx_VectorcallBuilder_AddArgStr(key, value, builder, args, n) PyDict_SetItemString(builder, key, value) #endif /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyLong_From_long(long value); /* CIntFromPy.proto */ static CYTHON_INLINE long __Pyx_PyLong_As_long(PyObject *); /* FastTypeChecks.proto */ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) #define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); #else #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) #define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) #define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2) { return PyErr_GivenExceptionMatches(err, type1) || 
PyErr_GivenExceptionMatches(err, type2); } #endif #define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) #ifdef PyExceptionInstance_Check #define __Pyx_PyBaseException_Check(obj) PyExceptionInstance_Check(obj) #else #define __Pyx_PyBaseException_Check(obj) __Pyx_TypeCheck(obj, PyExc_BaseException) #endif /* GetRuntimeVersion.proto */ static unsigned long __Pyx_get_runtime_version(void); /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); /* FunctionExport.proto */ static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); /* MultiPhaseInitModuleState.proto */ #if CYTHON_PEP489_MULTI_PHASE_INIT && CYTHON_USE_MODULE_STATE static PyObject *__Pyx_State_FindModule(void*); static int __Pyx_State_AddModule(PyObject* module, void*); static int __Pyx_State_RemoveModule(void*); #elif CYTHON_USE_MODULE_STATE #define __Pyx_State_FindModule PyState_FindModule #define __Pyx_State_AddModule PyState_AddModule #define __Pyx_State_RemoveModule PyState_RemoveModule #endif /* #### Code section: module_declarations ### */ /* CythonABIVersion.proto */ #if CYTHON_COMPILING_IN_LIMITED_API #if CYTHON_METH_FASTCALL #define __PYX_FASTCALL_ABI_SUFFIX "_fastcall" #else #define __PYX_FASTCALL_ABI_SUFFIX #endif #define __PYX_LIMITED_ABI_SUFFIX "limited" __PYX_FASTCALL_ABI_SUFFIX __PYX_AM_SEND_ABI_SUFFIX #else #define __PYX_LIMITED_ABI_SUFFIX #endif #if __PYX_HAS_PY_AM_SEND == 1 #define __PYX_AM_SEND_ABI_SUFFIX #elif __PYX_HAS_PY_AM_SEND == 2 #define __PYX_AM_SEND_ABI_SUFFIX "amsendbackport" #else #define __PYX_AM_SEND_ABI_SUFFIX "noamsend" #endif #ifndef __PYX_MONITORING_ABI_SUFFIX #define __PYX_MONITORING_ABI_SUFFIX #endif #if CYTHON_USE_TP_FINALIZE #define __PYX_TP_FINALIZE_ABI_SUFFIX #else #define __PYX_TP_FINALIZE_ABI_SUFFIX 
"nofinalize" #endif #if CYTHON_USE_FREELISTS || !defined(__Pyx_AsyncGen_USED) #define __PYX_FREELISTS_ABI_SUFFIX #else #define __PYX_FREELISTS_ABI_SUFFIX "nofreelists" #endif #define CYTHON_ABI __PYX_ABI_VERSION __PYX_LIMITED_ABI_SUFFIX __PYX_MONITORING_ABI_SUFFIX __PYX_TP_FINALIZE_ABI_SUFFIX __PYX_FREELISTS_ABI_SUFFIX __PYX_AM_SEND_ABI_SUFFIX #define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI #define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." #if !CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComplexObject *__pyx_v_self); /* proto*/ #endif #if !CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4imag_imag(PyComplexObject *__pyx_v_self); /* proto*/ #endif /* Module declarations from "libc.stdint" */ /* Module declarations from "libcpp.vector" */ /* Module declarations from "libcpp" */ /* Module declarations from "libcpp.memory" */ /* Module declarations from "cpython.version" */ /* Module declarations from "__builtin__" */ /* Module declarations from "cpython.type" */ /* Module declarations from "libc.string" */ /* Module declarations from "libc.stdio" */ /* Module declarations from "cpython.object" */ /* Module declarations from "cpython.ref" */ /* Module declarations from "cpython.exc" */ /* Module declarations from "cpython.module" */ /* Module declarations from "cpython.mem" */ /* Module declarations from "cpython.tuple" */ /* Module declarations from "cpython.list" */ /* Module declarations from "cpython.sequence" */ /* Module declarations from "cpython.mapping" */ /* Module declarations from "cpython.iterator" */ /* Module declarations from "cpython.number" */ /* Module declarations from "__builtin__" */ /* Module declarations from "cpython.bool" */ /* Module declarations from "cpython.long" */ /* Module declarations from "cpython.float" */ /* Module declarations from "cython" */ /* Module declarations from "__builtin__" */ /* Module declarations 
from "cpython.complex" */ /* Module declarations from "libc.stddef" */ /* Module declarations from "cpython.unicode" */ /* Module declarations from "cpython.pyport" */ /* Module declarations from "cpython.dict" */ /* Module declarations from "cpython.instance" */ /* Module declarations from "cpython.function" */ /* Module declarations from "cpython.method" */ /* Module declarations from "cpython.weakref" */ /* Module declarations from "cpython.getargs" */ /* Module declarations from "cpython.pythread" */ /* Module declarations from "cpython.pystate" */ /* Module declarations from "cpython.set" */ /* Module declarations from "cpython.buffer" */ /* Module declarations from "cpython.bytes" */ /* Module declarations from "cpython.pycapsule" */ /* Module declarations from "cpython.contextvars" */ /* Module declarations from "cpython" */ /* Module declarations from "libcpp.utility" */ /* Module declarations from "cuda.bindings._internal.utils" */ static int __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(PyObject *); /*proto*/ static int __pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &, PyObject *, int *); /*proto*/ static int __pyx_fuse_1__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &, PyObject *, int32_t *); /*proto*/ static int __pyx_fuse_2__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &, PyObject *, int64_t *); /*proto*/ static int __pyx_fuse_3__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &, PyObject *, char *); /*proto*/ static int __pyx_fuse_4__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &, PyObject *, float *); /*proto*/ static int __pyx_fuse_5__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &, PyObject *, double *); /*proto*/ static int __pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptrs(nullable_unique_ptr > &, 
PyObject *, void *); /*proto*/ static int __pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, int *); /*proto*/ static int __pyx_fuse_1__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, int32_t *); /*proto*/ static int __pyx_fuse_2__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, int64_t *); /*proto*/ static int __pyx_fuse_3__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, char *); /*proto*/ static int __pyx_fuse_4__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, float *); /*proto*/ static int __pyx_fuse_5__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, double *); /*proto*/ static std::vector __pyx_convert_vector_from_py_int(PyObject *); /*proto*/ static std::vector __pyx_convert_vector_from_py_int32_t(PyObject *); /*proto*/ static std::vector __pyx_convert_vector_from_py_int64_t(PyObject *); /*proto*/ static std::vector __pyx_convert_vector_from_py_float(PyObject *); /*proto*/ static std::vector __pyx_convert_vector_from_py_double(PyObject *); /*proto*/ /* #### Code section: typeinfo ### */ /* #### Code section: before_global_var ### */ #define __Pyx_MODULE_NAME "cuda.bindings._internal.utils" extern int __pyx_module_is_main_cuda__bindings___internal__utils; int __pyx_module_is_main_cuda__bindings___internal__utils = 0; /* Implementation of "cuda.bindings._internal.utils" */ /* #### Code section: global_var ### */ static PyObject *__pyx_builtin_RuntimeError; static PyObject *__pyx_builtin_AssertionError; static PyObject 
*__pyx_builtin_ValueError; static PyObject *__pyx_builtin_range; static PyObject *__pyx_builtin_enumerate; /* #### Code section: string_decls ### */ static const char __pyx_k_[] = ""; static const char __pyx_k__2[] = "?"; static const char __pyx_k_doc[] = "__doc__"; static const char __pyx_k_None[] = "None"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_name[] = "__name__"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_range[] = "range"; static const char __pyx_k_module[] = "__module__"; static const char __pyx_k_prepare[] = "__prepare__"; static const char __pyx_k_qualname[] = "__qualname__"; static const char __pyx_k_writable[] = "writable "; static const char __pyx_k_enumerate[] = "enumerate"; static const char __pyx_k_metaclass[] = "__metaclass__"; static const char __pyx_k_ValueError[] = "ValueError"; static const char __pyx_k_mro_entries[] = "__mro_entries__"; static const char __pyx_k_RuntimeError[] = "RuntimeError"; static const char __pyx_k_AssertionError[] = "AssertionError"; static const char __pyx_k_NotSupportedError[] = "NotSupportedError"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_buffer_of_size_bytes[] = "buffer, of size bytes"; static const char __pyx_k_FunctionNotFoundError[] = "FunctionNotFoundError"; static const char __pyx_k_cuda_bindings__internal_utils[] = "cuda.bindings._internal.utils"; static const char __pyx_k_buf_must_be_either_a_Python_int[] = "buf must be either a Python int representing the pointer address to a valid buffer, or a 1D contiguous "; /* #### Code section: decls ### */ /* #### Code section: late_includes ### */ /* #### Code section: module_state ### */ /* SmallCodeConfig */ #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE #elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) #define CYTHON_SMALL_CODE __attribute__((cold)) #else #define CYTHON_SMALL_CODE #endif 
#endif typedef struct { PyObject *__pyx_d; PyObject *__pyx_b; PyObject *__pyx_cython_runtime; PyObject *__pyx_empty_tuple; PyObject *__pyx_empty_bytes; PyObject *__pyx_empty_unicode; #ifdef __Pyx_CyFunction_USED PyTypeObject *__pyx_CyFunctionType; #endif #ifdef __Pyx_FusedFunction_USED PyTypeObject *__pyx_FusedFunctionType; #endif #ifdef __Pyx_Generator_USED PyTypeObject *__pyx_GeneratorType; #endif #ifdef __Pyx_IterableCoroutine_USED PyTypeObject *__pyx_IterableCoroutineType; #endif #ifdef __Pyx_Coroutine_USED PyTypeObject *__pyx_CoroutineAwaitType; #endif #ifdef __Pyx_Coroutine_USED PyTypeObject *__pyx_CoroutineType; #endif PyTypeObject *__pyx_ptype_7cpython_4type_type; PyTypeObject *__pyx_ptype_7cpython_4bool_bool; PyTypeObject *__pyx_ptype_7cpython_7complex_complex; PyObject *__pyx_tuple[4]; PyObject *__pyx_string_tab[24]; /* #### Code section: module_state_contents ### */ /* CodeObjectCache.module_state_decls */ struct __Pyx_CodeObjectCache __pyx_code_cache; /* #### Code section: module_state_end ### */ } __pyx_mstatetype; #if CYTHON_USE_MODULE_STATE #ifdef __cplusplus namespace { extern struct PyModuleDef __pyx_moduledef; } /* anonymous namespace */ #else static struct PyModuleDef __pyx_moduledef; #endif #define __pyx_mstate_global (__Pyx_PyModule_GetState(__Pyx_State_FindModule(&__pyx_moduledef))) #define __pyx_m (__Pyx_State_FindModule(&__pyx_moduledef)) #else static __pyx_mstatetype __pyx_mstate_global_static = #ifdef __cplusplus {}; #else {0}; #endif static __pyx_mstatetype * const __pyx_mstate_global = &__pyx_mstate_global_static; #endif /* #### Code section: constant_name_defines ### */ #define __pyx_kp_u_ __pyx_string_tab[0] #define __pyx_n_u_AssertionError __pyx_string_tab[1] #define __pyx_n_u_FunctionNotFoundError __pyx_string_tab[2] #define __pyx_kp_u_None __pyx_string_tab[3] #define __pyx_n_u_NotSupportedError __pyx_string_tab[4] #define __pyx_n_u_RuntimeError __pyx_string_tab[5] #define __pyx_n_u_ValueError __pyx_string_tab[6] #define 
__pyx_kp_u__2 __pyx_string_tab[7] #define __pyx_kp_u_buf_must_be_either_a_Python_int __pyx_string_tab[8] #define __pyx_kp_u_buffer_of_size_bytes __pyx_string_tab[9] #define __pyx_n_u_cline_in_traceback __pyx_string_tab[10] #define __pyx_n_u_cuda_bindings__internal_utils __pyx_string_tab[11] #define __pyx_n_u_doc __pyx_string_tab[12] #define __pyx_n_u_enumerate __pyx_string_tab[13] #define __pyx_n_u_main __pyx_string_tab[14] #define __pyx_n_u_metaclass __pyx_string_tab[15] #define __pyx_n_u_module __pyx_string_tab[16] #define __pyx_n_u_mro_entries __pyx_string_tab[17] #define __pyx_n_u_name __pyx_string_tab[18] #define __pyx_n_u_prepare __pyx_string_tab[19] #define __pyx_n_u_qualname __pyx_string_tab[20] #define __pyx_n_u_range __pyx_string_tab[21] #define __pyx_n_u_test __pyx_string_tab[22] #define __pyx_kp_u_writable __pyx_string_tab[23] /* #### Code section: module_state_clear ### */ #if CYTHON_USE_MODULE_STATE static CYTHON_SMALL_CODE int __pyx_m_clear(PyObject *m) { __pyx_mstatetype *clear_module_state = __Pyx_PyModule_GetState(m); if (!clear_module_state) return 0; Py_CLEAR(clear_module_state->__pyx_d); Py_CLEAR(clear_module_state->__pyx_b); Py_CLEAR(clear_module_state->__pyx_cython_runtime); Py_CLEAR(clear_module_state->__pyx_empty_tuple); Py_CLEAR(clear_module_state->__pyx_empty_bytes); Py_CLEAR(clear_module_state->__pyx_empty_unicode); #ifdef __Pyx_CyFunction_USED Py_CLEAR(clear_module_state->__pyx_CyFunctionType); #endif #ifdef __Pyx_FusedFunction_USED Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); #endif #if CYTHON_PEP489_MULTI_PHASE_INIT __Pyx_State_RemoveModule(NULL); #endif Py_CLEAR(clear_module_state->__pyx_ptype_7cpython_4type_type); Py_CLEAR(clear_module_state->__pyx_ptype_7cpython_4bool_bool); Py_CLEAR(clear_module_state->__pyx_ptype_7cpython_7complex_complex); for (int i=0; i<4; ++i) { Py_CLEAR(clear_module_state->__pyx_tuple[i]); } for (int i=0; i<24; ++i) { Py_CLEAR(clear_module_state->__pyx_string_tab[i]); } return 0; } #endif /* #### 
Code section: module_state_traverse ### */ #if CYTHON_USE_MODULE_STATE static CYTHON_SMALL_CODE int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { __pyx_mstatetype *traverse_module_state = __Pyx_PyModule_GetState(m); if (!traverse_module_state) return 0; Py_VISIT(traverse_module_state->__pyx_d); Py_VISIT(traverse_module_state->__pyx_b); Py_VISIT(traverse_module_state->__pyx_cython_runtime); __Pyx_VISIT_CONST(traverse_module_state->__pyx_empty_tuple); __Pyx_VISIT_CONST(traverse_module_state->__pyx_empty_bytes); __Pyx_VISIT_CONST(traverse_module_state->__pyx_empty_unicode); #ifdef __Pyx_CyFunction_USED Py_VISIT(traverse_module_state->__pyx_CyFunctionType); #endif #ifdef __Pyx_FusedFunction_USED Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); #endif Py_VISIT(traverse_module_state->__pyx_ptype_7cpython_4type_type); Py_VISIT(traverse_module_state->__pyx_ptype_7cpython_4bool_bool); Py_VISIT(traverse_module_state->__pyx_ptype_7cpython_7complex_complex); for (int i=0; i<4; ++i) { __Pyx_VISIT_CONST(traverse_module_state->__pyx_tuple[i]); } for (int i=0; i<24; ++i) { __Pyx_VISIT_CONST(traverse_module_state->__pyx_string_tab[i]); } return 0; } #endif /* #### Code section: module_code ### */ /* "vector.from_py":51 * cdef Py_ssize_t __Pyx_PyObject_LengthHint(object o, Py_ssize_t defaultval) except -1 * * @cname("__pyx_convert_vector_from_py_int") # <<<<<<<<<<<<<< * cdef vector[X] __pyx_convert_vector_from_py_int(object o) except *: * */ static std::vector __pyx_convert_vector_from_py_int(PyObject *__pyx_v_o) { std::vector __pyx_v_v; Py_ssize_t __pyx_v_s; PyObject *__pyx_v_item = NULL; std::vector __pyx_r; __Pyx_RefNannyDeclarations Py_ssize_t __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *(*__pyx_t_4)(PyObject *); PyObject *__pyx_t_5 = NULL; int __pyx_t_6; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_convert_vector_from_py_int", 0); /* "vector.from_py":55 * * cdef 
vector[X] v * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) # <<<<<<<<<<<<<< * * if s > 0: */ __pyx_t_1 = __Pyx_PyObject_LengthHint(__pyx_v_o, 0); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(1, 55, __pyx_L1_error) __pyx_v_s = __pyx_t_1; /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ __pyx_t_2 = (__pyx_v_s > 0); if (__pyx_t_2) { /* "vector.from_py":58 * * if s > 0: * v.reserve( s) # <<<<<<<<<<<<<< * * for item in o: */ try { __pyx_v_v.reserve(((size_t)__pyx_v_s)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 58, __pyx_L1_error) } /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { __pyx_t_3 = __pyx_v_o; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; __pyx_t_4 = NULL; } else { __pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 60, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = (CYTHON_COMPILING_IN_LIMITED_API) ? 
PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 60, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_4)) { if (likely(PyList_CheckExact(__pyx_t_3))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } __pyx_t_5 = __Pyx_PyList_GetItemRef(__pyx_t_3, __pyx_t_1); ++__pyx_t_1; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_5 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1)); #else __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_1); #endif ++__pyx_t_1; } if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 60, __pyx_L1_error) } else { __pyx_t_5 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_5)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(1, 60, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_5); __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_5); __pyx_t_5 = 0; /* "vector.from_py":61 * * for item in o: * v.push_back(item) # <<<<<<<<<<<<<< * * return v */ __pyx_t_6 = __Pyx_PyLong_As_int(__pyx_v_item); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 61, __pyx_L1_error) try { __pyx_v_v.push_back(((int)__pyx_t_6)); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(1, 61, __pyx_L1_error) } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "vector.from_py":63 * v.push_back(item) * * return v # <<<<<<<<<<<<<< * * */ __pyx_r = __pyx_v_v; goto __pyx_L0; /* "vector.from_py":51 * cdef Py_ssize_t __Pyx_PyObject_LengthHint(object o, Py_ssize_t defaultval) except -1 * * @cname("__pyx_convert_vector_from_py_int") # <<<<<<<<<<<<<< * cdef vector[X] __pyx_convert_vector_from_py_int(object o) except *: * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("vector.from_py.__pyx_convert_vector_from_py_int", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_pretend_to_initialize(&__pyx_r); __pyx_L0:; __Pyx_XDECREF(__pyx_v_item); __Pyx_RefNannyFinishContext(); return __pyx_r; } static std::vector __pyx_convert_vector_from_py_int32_t(PyObject *__pyx_v_o) { std::vector __pyx_v_v; Py_ssize_t __pyx_v_s; PyObject *__pyx_v_item = NULL; std::vector __pyx_r; __Pyx_RefNannyDeclarations Py_ssize_t __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *(*__pyx_t_4)(PyObject *); PyObject *__pyx_t_5 = NULL; int32_t __pyx_t_6; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_convert_vector_from_py_int32_t", 0); /* "vector.from_py":55 * * cdef vector[X] v * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) # <<<<<<<<<<<<<< * * if s > 0: */ __pyx_t_1 = __Pyx_PyObject_LengthHint(__pyx_v_o, 0); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(1, 55, __pyx_L1_error) __pyx_v_s = __pyx_t_1; /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ __pyx_t_2 = (__pyx_v_s > 0); if (__pyx_t_2) { /* "vector.from_py":58 * * if s > 0: * v.reserve( s) # <<<<<<<<<<<<<< * * for item in o: */ try { 
__pyx_v_v.reserve(((size_t)__pyx_v_s)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 58, __pyx_L1_error) } /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { __pyx_t_3 = __pyx_v_o; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; __pyx_t_4 = NULL; } else { __pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 60, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 60, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_4)) { if (likely(PyList_CheckExact(__pyx_t_3))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } __pyx_t_5 = __Pyx_PyList_GetItemRef(__pyx_t_3, __pyx_t_1); ++__pyx_t_1; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_5 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1)); #else __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_1); #endif ++__pyx_t_1; } if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 60, __pyx_L1_error) } else { __pyx_t_5 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_5)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(1, 60, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_5); __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_5); __pyx_t_5 = 
0; /* "vector.from_py":61 * * for item in o: * v.push_back(item) # <<<<<<<<<<<<<< * * return v */ __pyx_t_6 = __Pyx_PyLong_As_int32_t(__pyx_v_item); if (unlikely((__pyx_t_6 == ((int32_t)-1)) && PyErr_Occurred())) __PYX_ERR(1, 61, __pyx_L1_error) try { __pyx_v_v.push_back(((int32_t)__pyx_t_6)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 61, __pyx_L1_error) } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "vector.from_py":63 * v.push_back(item) * * return v # <<<<<<<<<<<<<< * * */ __pyx_r = __pyx_v_v; goto __pyx_L0; /* "vector.from_py":51 * cdef Py_ssize_t __Pyx_PyObject_LengthHint(object o, Py_ssize_t defaultval) except -1 * * @cname("__pyx_convert_vector_from_py_int32_t") # <<<<<<<<<<<<<< * cdef vector[X] __pyx_convert_vector_from_py_int32_t(object o) except *: * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("vector.from_py.__pyx_convert_vector_from_py_int32_t", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_pretend_to_initialize(&__pyx_r); __pyx_L0:; __Pyx_XDECREF(__pyx_v_item); __Pyx_RefNannyFinishContext(); return __pyx_r; } static std::vector __pyx_convert_vector_from_py_int64_t(PyObject *__pyx_v_o) { std::vector __pyx_v_v; Py_ssize_t __pyx_v_s; PyObject *__pyx_v_item = NULL; std::vector __pyx_r; __Pyx_RefNannyDeclarations Py_ssize_t __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *(*__pyx_t_4)(PyObject *); PyObject *__pyx_t_5 = NULL; int64_t __pyx_t_6; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_convert_vector_from_py_int64_t", 0); /* "vector.from_py":55 * * cdef vector[X] v * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) # <<<<<<<<<<<<<< * * if s > 0: */ __pyx_t_1 = __Pyx_PyObject_LengthHint(__pyx_v_o, 0); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(1, 55, __pyx_L1_error) __pyx_v_s 
= __pyx_t_1; /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ __pyx_t_2 = (__pyx_v_s > 0); if (__pyx_t_2) { /* "vector.from_py":58 * * if s > 0: * v.reserve( s) # <<<<<<<<<<<<<< * * for item in o: */ try { __pyx_v_v.reserve(((size_t)__pyx_v_s)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 58, __pyx_L1_error) } /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { __pyx_t_3 = __pyx_v_o; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; __pyx_t_4 = NULL; } else { __pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 60, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 60, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_4)) { if (likely(PyList_CheckExact(__pyx_t_3))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } __pyx_t_5 = __Pyx_PyList_GetItemRef(__pyx_t_3, __pyx_t_1); ++__pyx_t_1; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_5 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1)); #else __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_1); #endif ++__pyx_t_1; } if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 60, __pyx_L1_error) } else { __pyx_t_5 = __pyx_t_4(__pyx_t_3); if 
(unlikely(!__pyx_t_5)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(1, 60, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_5); __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_5); __pyx_t_5 = 0; /* "vector.from_py":61 * * for item in o: * v.push_back(item) # <<<<<<<<<<<<<< * * return v */ __pyx_t_6 = __Pyx_PyLong_As_int64_t(__pyx_v_item); if (unlikely((__pyx_t_6 == ((int64_t)-1)) && PyErr_Occurred())) __PYX_ERR(1, 61, __pyx_L1_error) try { __pyx_v_v.push_back(((int64_t)__pyx_t_6)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 61, __pyx_L1_error) } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "vector.from_py":63 * v.push_back(item) * * return v # <<<<<<<<<<<<<< * * */ __pyx_r = __pyx_v_v; goto __pyx_L0; /* "vector.from_py":51 * cdef Py_ssize_t __Pyx_PyObject_LengthHint(object o, Py_ssize_t defaultval) except -1 * * @cname("__pyx_convert_vector_from_py_int64_t") # <<<<<<<<<<<<<< * cdef vector[X] __pyx_convert_vector_from_py_int64_t(object o) except *: * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("vector.from_py.__pyx_convert_vector_from_py_int64_t", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_pretend_to_initialize(&__pyx_r); __pyx_L0:; __Pyx_XDECREF(__pyx_v_item); __Pyx_RefNannyFinishContext(); return __pyx_r; } static std::vector __pyx_convert_vector_from_py_float(PyObject *__pyx_v_o) { std::vector __pyx_v_v; Py_ssize_t __pyx_v_s; PyObject *__pyx_v_item = NULL; std::vector __pyx_r; __Pyx_RefNannyDeclarations Py_ssize_t __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *(*__pyx_t_4)(PyObject *); PyObject *__pyx_t_5 = NULL; float __pyx_t_6; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; 
__Pyx_RefNannySetupContext("__pyx_convert_vector_from_py_float", 0); /* "vector.from_py":55 * * cdef vector[X] v * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) # <<<<<<<<<<<<<< * * if s > 0: */ __pyx_t_1 = __Pyx_PyObject_LengthHint(__pyx_v_o, 0); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(1, 55, __pyx_L1_error) __pyx_v_s = __pyx_t_1; /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ __pyx_t_2 = (__pyx_v_s > 0); if (__pyx_t_2) { /* "vector.from_py":58 * * if s > 0: * v.reserve( s) # <<<<<<<<<<<<<< * * for item in o: */ try { __pyx_v_v.reserve(((size_t)__pyx_v_s)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 58, __pyx_L1_error) } /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { __pyx_t_3 = __pyx_v_o; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; __pyx_t_4 = NULL; } else { __pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 60, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = (CYTHON_COMPILING_IN_LIMITED_API) ? 
PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 60, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_4)) { if (likely(PyList_CheckExact(__pyx_t_3))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } __pyx_t_5 = __Pyx_PyList_GetItemRef(__pyx_t_3, __pyx_t_1); ++__pyx_t_1; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_5 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1)); #else __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_1); #endif ++__pyx_t_1; } if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 60, __pyx_L1_error) } else { __pyx_t_5 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_5)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(1, 60, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_5); __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_5); __pyx_t_5 = 0; /* "vector.from_py":61 * * for item in o: * v.push_back(item) # <<<<<<<<<<<<<< * * return v */ __pyx_t_6 = __Pyx_PyFloat_AsFloat(__pyx_v_item); if (unlikely((__pyx_t_6 == (float)-1) && PyErr_Occurred())) __PYX_ERR(1, 61, __pyx_L1_error) try { __pyx_v_v.push_back(((float)__pyx_t_6)); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(1, 61, __pyx_L1_error) } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "vector.from_py":63 * v.push_back(item) * * return v # <<<<<<<<<<<<<< * * */ __pyx_r = __pyx_v_v; goto __pyx_L0; /* "vector.from_py":51 * cdef Py_ssize_t __Pyx_PyObject_LengthHint(object o, Py_ssize_t defaultval) except -1 * * @cname("__pyx_convert_vector_from_py_float") # <<<<<<<<<<<<<< * cdef vector[X] __pyx_convert_vector_from_py_float(object o) except *: * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("vector.from_py.__pyx_convert_vector_from_py_float", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_pretend_to_initialize(&__pyx_r); __pyx_L0:; __Pyx_XDECREF(__pyx_v_item); __Pyx_RefNannyFinishContext(); return __pyx_r; } static std::vector __pyx_convert_vector_from_py_double(PyObject *__pyx_v_o) { std::vector __pyx_v_v; Py_ssize_t __pyx_v_s; PyObject *__pyx_v_item = NULL; std::vector __pyx_r; __Pyx_RefNannyDeclarations Py_ssize_t __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *(*__pyx_t_4)(PyObject *); PyObject *__pyx_t_5 = NULL; double __pyx_t_6; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_convert_vector_from_py_double", 0); /* "vector.from_py":55 * * cdef vector[X] v * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) # <<<<<<<<<<<<<< * * if s > 0: */ __pyx_t_1 = __Pyx_PyObject_LengthHint(__pyx_v_o, 0); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(1, 55, __pyx_L1_error) __pyx_v_s = __pyx_t_1; /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ __pyx_t_2 = (__pyx_v_s > 0); if (__pyx_t_2) { /* "vector.from_py":58 * * if s > 0: * v.reserve( s) # <<<<<<<<<<<<<< * * for item in o: */ try { 
__pyx_v_v.reserve(((size_t)__pyx_v_s)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 58, __pyx_L1_error) } /* "vector.from_py":57 * cdef Py_ssize_t s = __Pyx_PyObject_LengthHint(o, 0) * * if s > 0: # <<<<<<<<<<<<<< * v.reserve( s) * */ } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { __pyx_t_3 = __pyx_v_o; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; __pyx_t_4 = NULL; } else { __pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 60, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 60, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_4)) { if (likely(PyList_CheckExact(__pyx_t_3))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } __pyx_t_5 = __Pyx_PyList_GetItemRef(__pyx_t_3, __pyx_t_1); ++__pyx_t_1; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(1, 60, __pyx_L1_error) #endif if (__pyx_t_1 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_5 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1)); #else __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_1); #endif ++__pyx_t_1; } if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 60, __pyx_L1_error) } else { __pyx_t_5 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_5)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(1, 60, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_5); __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_5); __pyx_t_5 = 
0; /* "vector.from_py":61 * * for item in o: * v.push_back(item) # <<<<<<<<<<<<<< * * return v */ __pyx_t_6 = __Pyx_PyFloat_AsDouble(__pyx_v_item); if (unlikely((__pyx_t_6 == (double)-1) && PyErr_Occurred())) __PYX_ERR(1, 61, __pyx_L1_error) try { __pyx_v_v.push_back(((double)__pyx_t_6)); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(1, 61, __pyx_L1_error) } /* "vector.from_py":60 * v.reserve( s) * * for item in o: # <<<<<<<<<<<<<< * v.push_back(item) * */ } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "vector.from_py":63 * v.push_back(item) * * return v # <<<<<<<<<<<<<< * * */ __pyx_r = __pyx_v_v; goto __pyx_L0; /* "vector.from_py":51 * cdef Py_ssize_t __Pyx_PyObject_LengthHint(object o, Py_ssize_t defaultval) except -1 * * @cname("__pyx_convert_vector_from_py_double") # <<<<<<<<<<<<<< * cdef vector[X] __pyx_convert_vector_from_py_double(object o) except *: * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("vector.from_py.__pyx_convert_vector_from_py_double", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_pretend_to_initialize(&__pyx_r); __pyx_L0:; __Pyx_XDECREF(__pyx_v_item); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "cpython/complex.pxd":20 * * # unavailable in limited API * @property # <<<<<<<<<<<<<< * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline double real(self) noexcept: */ #if !CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComplexObject *__pyx_v_self) { double __pyx_r; /* "cpython/complex.pxd":23 * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline double real(self) noexcept: * return self.cval.real # <<<<<<<<<<<<<< * * # unavailable in limited API */ __pyx_r = __pyx_v_self->cval.real; goto __pyx_L0; /* "cpython/complex.pxd":20 * * # unavailable in limited API * @property # <<<<<<<<<<<<<< * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef 
inline double real(self) noexcept: */ /* function exit code */ __pyx_L0:; return __pyx_r; } #endif /*!(#if !CYTHON_COMPILING_IN_LIMITED_API)*/ /* "cpython/complex.pxd":26 * * # unavailable in limited API * @property # <<<<<<<<<<<<<< * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline double imag(self) noexcept: */ #if !CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4imag_imag(PyComplexObject *__pyx_v_self) { double __pyx_r; /* "cpython/complex.pxd":29 * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline double imag(self) noexcept: * return self.cval.imag # <<<<<<<<<<<<<< * * # PyTypeObject PyComplex_Type */ __pyx_r = __pyx_v_self->cval.imag; goto __pyx_L0; /* "cpython/complex.pxd":26 * * # unavailable in limited API * @property # <<<<<<<<<<<<<< * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline double imag(self) noexcept: */ /* function exit code */ __pyx_L0:; return __pyx_r; } #endif /*!(#if !CYTHON_COMPILING_IN_LIMITED_API)*/ /* "cpython/contextvars.pxd":115 * * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") # <<<<<<<<<<<<<< * cdef inline object get_value(var, default_value=None): * """Return a new reference to the value of the context variable, */ #if !CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE PyObject *__pyx_f_7cpython_11contextvars_get_value(PyObject *__pyx_v_var, struct __pyx_opt_args_7cpython_11contextvars_get_value *__pyx_optional_args) { /* "cpython/contextvars.pxd":116 * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline object get_value(var, default_value=None): # <<<<<<<<<<<<<< * """Return a new reference to the value of the context variable, * or the default value of the context variable, */ PyObject *__pyx_v_default_value = ((PyObject *)Py_None); PyObject *__pyx_v_value; PyObject *__pyx_v_pyvalue = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; int 
__pyx_t_2; PyObject *__pyx_t_3 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("get_value", 0); if (__pyx_optional_args) { if (__pyx_optional_args->__pyx_n > 0) { __pyx_v_default_value = __pyx_optional_args->default_value; } } /* "cpython/contextvars.pxd":121 * or None if no such value or default was found. * """ * cdef PyObject *value = NULL # <<<<<<<<<<<<<< * PyContextVar_Get(var, NULL, &value) * if value is NULL: */ __pyx_v_value = NULL; /* "cpython/contextvars.pxd":122 * """ * cdef PyObject *value = NULL * PyContextVar_Get(var, NULL, &value) # <<<<<<<<<<<<<< * if value is NULL: * # context variable does not have a default */ __pyx_t_1 = PyContextVar_Get(__pyx_v_var, NULL, (&__pyx_v_value)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(2, 122, __pyx_L1_error) /* "cpython/contextvars.pxd":123 * cdef PyObject *value = NULL * PyContextVar_Get(var, NULL, &value) * if value is NULL: # <<<<<<<<<<<<<< * # context variable does not have a default * pyvalue = default_value */ __pyx_t_2 = (__pyx_v_value == NULL); if (__pyx_t_2) { /* "cpython/contextvars.pxd":125 * if value is NULL: * # context variable does not have a default * pyvalue = default_value # <<<<<<<<<<<<<< * else: * # value or default value of context variable */ __Pyx_INCREF(__pyx_v_default_value); __pyx_v_pyvalue = __pyx_v_default_value; /* "cpython/contextvars.pxd":123 * cdef PyObject *value = NULL * PyContextVar_Get(var, NULL, &value) * if value is NULL: # <<<<<<<<<<<<<< * # context variable does not have a default * pyvalue = default_value */ goto __pyx_L3; } /* "cpython/contextvars.pxd":128 * else: * # value or default value of context variable * pyvalue = value # <<<<<<<<<<<<<< * Py_XDECREF(value) # PyContextVar_Get() returned an owned reference as 'PyObject*' * return pyvalue */ /*else*/ { __pyx_t_3 = ((PyObject *)__pyx_v_value); __Pyx_INCREF(__pyx_t_3); __pyx_v_pyvalue = __pyx_t_3; __pyx_t_3 = 0; /* "cpython/contextvars.pxd":129 * # 
value or default value of context variable * pyvalue = value * Py_XDECREF(value) # PyContextVar_Get() returned an owned reference as 'PyObject*' # <<<<<<<<<<<<<< * return pyvalue * */ Py_XDECREF(__pyx_v_value); } __pyx_L3:; /* "cpython/contextvars.pxd":130 * pyvalue = value * Py_XDECREF(value) # PyContextVar_Get() returned an owned reference as 'PyObject*' * return pyvalue # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_pyvalue); __pyx_r = __pyx_v_pyvalue; goto __pyx_L0; /* "cpython/contextvars.pxd":115 * * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") # <<<<<<<<<<<<<< * cdef inline object get_value(var, default_value=None): * """Return a new reference to the value of the context variable, */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("cpython.contextvars.get_value", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF(__pyx_v_pyvalue); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } #endif /*!(#if !CYTHON_COMPILING_IN_LIMITED_API)*/ /* "cpython/contextvars.pxd":133 * * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") # <<<<<<<<<<<<<< * cdef inline object get_value_no_default(var, default_value=None): * """Return a new reference to the value of the context variable, */ #if !CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE PyObject *__pyx_f_7cpython_11contextvars_get_value_no_default(PyObject *__pyx_v_var, struct __pyx_opt_args_7cpython_11contextvars_get_value_no_default *__pyx_optional_args) { /* "cpython/contextvars.pxd":134 * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") * cdef inline object get_value_no_default(var, default_value=None): # <<<<<<<<<<<<<< * """Return a new reference to the value of the context variable, * or the provided default value if no such value was found. 
*/ PyObject *__pyx_v_default_value = ((PyObject *)Py_None); PyObject *__pyx_v_value; PyObject *__pyx_v_pyvalue = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; PyObject *__pyx_t_2 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("get_value_no_default", 0); if (__pyx_optional_args) { if (__pyx_optional_args->__pyx_n > 0) { __pyx_v_default_value = __pyx_optional_args->default_value; } } /* "cpython/contextvars.pxd":140 * Ignores the default value of the context variable, if any. * """ * cdef PyObject *value = NULL # <<<<<<<<<<<<<< * PyContextVar_Get(var, default_value, &value) * # value of context variable or 'default_value' */ __pyx_v_value = NULL; /* "cpython/contextvars.pxd":141 * """ * cdef PyObject *value = NULL * PyContextVar_Get(var, default_value, &value) # <<<<<<<<<<<<<< * # value of context variable or 'default_value' * pyvalue = value */ __pyx_t_1 = PyContextVar_Get(__pyx_v_var, ((PyObject *)__pyx_v_default_value), (&__pyx_v_value)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(2, 141, __pyx_L1_error) /* "cpython/contextvars.pxd":143 * PyContextVar_Get(var, default_value, &value) * # value of context variable or 'default_value' * pyvalue = value # <<<<<<<<<<<<<< * Py_XDECREF(value) # PyContextVar_Get() returned an owned reference as 'PyObject*' * return pyvalue */ __pyx_t_2 = ((PyObject *)__pyx_v_value); __Pyx_INCREF(__pyx_t_2); __pyx_v_pyvalue = __pyx_t_2; __pyx_t_2 = 0; /* "cpython/contextvars.pxd":144 * # value of context variable or 'default_value' * pyvalue = value * Py_XDECREF(value) # PyContextVar_Get() returned an owned reference as 'PyObject*' # <<<<<<<<<<<<<< * return pyvalue */ Py_XDECREF(__pyx_v_value); /* "cpython/contextvars.pxd":145 * pyvalue = value * Py_XDECREF(value) # PyContextVar_Get() returned an owned reference as 'PyObject*' * return pyvalue # <<<<<<<<<<<<<< */ __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_pyvalue); __pyx_r = 
__pyx_v_pyvalue; goto __pyx_L0; /* "cpython/contextvars.pxd":133 * * * @_cython.c_compile_guard("!CYTHON_COMPILING_IN_LIMITED_API") # <<<<<<<<<<<<<< * cdef inline object get_value_no_default(var, default_value=None): * """Return a new reference to the value of the context variable, */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("cpython.contextvars.get_value_no_default", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF(__pyx_v_pyvalue); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } #endif /*!(#if !CYTHON_COMPILING_IN_LIMITED_API)*/ /* "cuda/bindings/_internal/utils.pyx":11 * * * cdef bint is_nested_sequence(data): # <<<<<<<<<<<<<< * if not cpython.PySequence_Check(data): * return False */ static int __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(PyObject *__pyx_v_data) { PyObject *__pyx_v_i = NULL; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; PyObject *__pyx_t_2 = NULL; Py_ssize_t __pyx_t_3; PyObject *(*__pyx_t_4)(PyObject *); PyObject *__pyx_t_5 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("is_nested_sequence", 0); /* "cuda/bindings/_internal/utils.pyx":12 * * cdef bint is_nested_sequence(data): * if not cpython.PySequence_Check(data): # <<<<<<<<<<<<<< * return False * else: */ __pyx_t_1 = (!PySequence_Check(__pyx_v_data)); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":13 * cdef bint is_nested_sequence(data): * if not cpython.PySequence_Check(data): * return False # <<<<<<<<<<<<<< * else: * for i in data: */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":12 * * cdef bint is_nested_sequence(data): * if not cpython.PySequence_Check(data): # <<<<<<<<<<<<<< * return False * else: */ } /* "cuda/bindings/_internal/utils.pyx":15 * return False * else: * for i in data: # <<<<<<<<<<<<<< * if not cpython.PySequence_Check(i): * return False */ 
/*else*/ { if (likely(PyList_CheckExact(__pyx_v_data)) || PyTuple_CheckExact(__pyx_v_data)) { __pyx_t_2 = __pyx_v_data; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0; __pyx_t_4 = NULL; } else { __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_data); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_4 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 15, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_4)) { if (likely(PyList_CheckExact(__pyx_t_2))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 15, __pyx_L1_error) #endif if (__pyx_t_3 >= __pyx_temp) break; } __pyx_t_5 = __Pyx_PyList_GetItemRef(__pyx_t_2, __pyx_t_3); ++__pyx_t_3; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 15, __pyx_L1_error) #endif if (__pyx_t_3 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_5 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3)); #else __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_3); #endif ++__pyx_t_3; } if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 15, __pyx_L1_error) } else { __pyx_t_5 = __pyx_t_4(__pyx_t_2); if (unlikely(!__pyx_t_5)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 15, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_5); __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_5); __pyx_t_5 = 0; /* "cuda/bindings/_internal/utils.pyx":16 * else: * for i in data: * if not cpython.PySequence_Check(i): # <<<<<<<<<<<<<< * return False * else: */ __pyx_t_1 = (!PySequence_Check(__pyx_v_i)); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":17 * for i in data: * if not cpython.PySequence_Check(i): * return False 
# <<<<<<<<<<<<<< * else: * return True */ __pyx_r = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":16 * else: * for i in data: * if not cpython.PySequence_Check(i): # <<<<<<<<<<<<<< * return False * else: */ } /* "cuda/bindings/_internal/utils.pyx":15 * return False * else: * for i in data: # <<<<<<<<<<<<<< * if not cpython.PySequence_Check(i): * return False */ } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /*else*/ { /* "cuda/bindings/_internal/utils.pyx":19 * return False * else: * return True # <<<<<<<<<<<<<< * * */ __pyx_r = 1; goto __pyx_L0; } } /* "cuda/bindings/_internal/utils.pyx":11 * * * cdef bint is_nested_sequence(data): # <<<<<<<<<<<<<< * if not cpython.PySequence_Check(data): * return False */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("cuda.bindings._internal.utils.is_nested_sequence", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_i); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "cuda/bindings/_internal/utils.pyx":22 * * * cdef void* get_buffer_pointer(buf, Py_ssize_t size, readonly=True) except*: # <<<<<<<<<<<<<< * """The caller must ensure ``buf`` is alive when the returned pointer is in use.""" * cdef void* bufPtr */ static void *__pyx_f_4cuda_8bindings_9_internal_5utils_get_buffer_pointer(PyObject *__pyx_v_buf, Py_ssize_t __pyx_v_size, struct __pyx_opt_args_4cuda_8bindings_9_internal_5utils_get_buffer_pointer *__pyx_optional_args) { PyObject *__pyx_v_readonly = ((PyObject *)Py_True); void *__pyx_v_bufPtr; int __pyx_v_flags; int __pyx_v_status; Py_buffer __pyx_v_view; PyObject *__pyx_v_e = NULL; PyObject *__pyx_v_adj = NULL; void *__pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; intptr_t __pyx_t_3; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; int __pyx_t_7; void *__pyx_t_8; PyObject *__pyx_t_9 = NULL; PyObject *__pyx_t_10 = 
NULL; PyObject *__pyx_t_11 = NULL; PyObject *__pyx_t_12 = NULL; PyObject *__pyx_t_13 = NULL; PyObject *__pyx_t_14 = NULL; PyObject *__pyx_t_15 = NULL; PyObject *__pyx_t_16[3]; PyObject *__pyx_t_17 = NULL; size_t __pyx_t_18; int __pyx_t_19; char const *__pyx_t_20; PyObject *__pyx_t_21 = NULL; PyObject *__pyx_t_22 = NULL; PyObject *__pyx_t_23 = NULL; PyObject *__pyx_t_24 = NULL; PyObject *__pyx_t_25 = NULL; PyObject *__pyx_t_26 = NULL; char const *__pyx_t_27; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("get_buffer_pointer", 0); if (__pyx_optional_args) { if (__pyx_optional_args->__pyx_n > 0) { __pyx_v_readonly = __pyx_optional_args->readonly; } } /* "cuda/bindings/_internal/utils.pyx":25 * """The caller must ensure ``buf`` is alive when the returned pointer is in use.""" * cdef void* bufPtr * cdef int flags = cpython.PyBUF_ANY_CONTIGUOUS # <<<<<<<<<<<<<< * if not readonly: * flags |= cpython.PyBUF_WRITABLE */ __pyx_v_flags = PyBUF_ANY_CONTIGUOUS; /* "cuda/bindings/_internal/utils.pyx":26 * cdef void* bufPtr * cdef int flags = cpython.PyBUF_ANY_CONTIGUOUS * if not readonly: # <<<<<<<<<<<<<< * flags |= cpython.PyBUF_WRITABLE * cdef int status = -1 */ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_readonly); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 26, __pyx_L1_error) __pyx_t_2 = (!__pyx_t_1); if (__pyx_t_2) { /* "cuda/bindings/_internal/utils.pyx":27 * cdef int flags = cpython.PyBUF_ANY_CONTIGUOUS * if not readonly: * flags |= cpython.PyBUF_WRITABLE # <<<<<<<<<<<<<< * cdef int status = -1 * cdef cpython.Py_buffer view */ __pyx_v_flags = (__pyx_v_flags | PyBUF_WRITABLE); /* "cuda/bindings/_internal/utils.pyx":26 * cdef void* bufPtr * cdef int flags = cpython.PyBUF_ANY_CONTIGUOUS * if not readonly: # <<<<<<<<<<<<<< * flags |= cpython.PyBUF_WRITABLE * cdef int status = -1 */ } /* "cuda/bindings/_internal/utils.pyx":28 * if not readonly: * flags |= cpython.PyBUF_WRITABLE * cdef int status = -1 # <<<<<<<<<<<<<< * 
cdef cpython.Py_buffer view * */ __pyx_v_status = -1; /* "cuda/bindings/_internal/utils.pyx":31 * cdef cpython.Py_buffer view * * if isinstance(buf, int): # <<<<<<<<<<<<<< * bufPtr = buf * else: # try buffer protocol */ __pyx_t_2 = PyLong_Check(__pyx_v_buf); if (__pyx_t_2) { /* "cuda/bindings/_internal/utils.pyx":32 * * if isinstance(buf, int): * bufPtr = buf # <<<<<<<<<<<<<< * else: # try buffer protocol * try: */ __pyx_t_3 = PyLong_AsSsize_t(__pyx_v_buf); if (unlikely((__pyx_t_3 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 32, __pyx_L1_error) __pyx_v_bufPtr = ((void *)((intptr_t)__pyx_t_3)); /* "cuda/bindings/_internal/utils.pyx":31 * cdef cpython.Py_buffer view * * if isinstance(buf, int): # <<<<<<<<<<<<<< * bufPtr = buf * else: # try buffer protocol */ goto __pyx_L4; } /* "cuda/bindings/_internal/utils.pyx":34 * bufPtr = buf * else: # try buffer protocol * try: # <<<<<<<<<<<<<< * status = cpython.PyObject_GetBuffer(buf, &view, flags) * # when the caller does not provide a size, it is set to -1 at generate-time by cybind */ /*else*/ { /*try:*/ { { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_4, &__pyx_t_5, &__pyx_t_6); __Pyx_XGOTREF(__pyx_t_4); __Pyx_XGOTREF(__pyx_t_5); __Pyx_XGOTREF(__pyx_t_6); /*try:*/ { /* "cuda/bindings/_internal/utils.pyx":35 * else: # try buffer protocol * try: * status = cpython.PyObject_GetBuffer(buf, &view, flags) # <<<<<<<<<<<<<< * # when the caller does not provide a size, it is set to -1 at generate-time by cybind * if size != -1: */ __pyx_t_7 = PyObject_GetBuffer(__pyx_v_buf, (&__pyx_v_view), __pyx_v_flags); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 35, __pyx_L8_error) __pyx_v_status = __pyx_t_7; /* "cuda/bindings/_internal/utils.pyx":37 * status = cpython.PyObject_GetBuffer(buf, &view, flags) * # when the caller does not provide a size, it is set to -1 at generate-time by cybind * if size != -1: # <<<<<<<<<<<<<< * assert view.len == size * assert view.ndim == 1 */ __pyx_t_2 = 
(__pyx_v_size != -1L); if (__pyx_t_2) { /* "cuda/bindings/_internal/utils.pyx":38 * # when the caller does not provide a size, it is set to -1 at generate-time by cybind * if size != -1: * assert view.len == size # <<<<<<<<<<<<<< * assert view.ndim == 1 * except Exception as e: */ #ifndef CYTHON_WITHOUT_ASSERTIONS if (unlikely(__pyx_assertions_enabled())) { __pyx_t_2 = (__pyx_v_view.len == __pyx_v_size); if (unlikely(!__pyx_t_2)) { __Pyx_Raise(__pyx_builtin_AssertionError, 0, 0, 0); __PYX_ERR(0, 38, __pyx_L8_error) } } #else if ((1)); else __PYX_ERR(0, 38, __pyx_L8_error) #endif /* "cuda/bindings/_internal/utils.pyx":37 * status = cpython.PyObject_GetBuffer(buf, &view, flags) * # when the caller does not provide a size, it is set to -1 at generate-time by cybind * if size != -1: # <<<<<<<<<<<<<< * assert view.len == size * assert view.ndim == 1 */ } /* "cuda/bindings/_internal/utils.pyx":39 * if size != -1: * assert view.len == size * assert view.ndim == 1 # <<<<<<<<<<<<<< * except Exception as e: * adj = "writable " if not readonly else "" */ #ifndef CYTHON_WITHOUT_ASSERTIONS if (unlikely(__pyx_assertions_enabled())) { __pyx_t_2 = (__pyx_v_view.ndim == 1); if (unlikely(!__pyx_t_2)) { __Pyx_Raise(__pyx_builtin_AssertionError, 0, 0, 0); __PYX_ERR(0, 39, __pyx_L8_error) } } #else if ((1)); else __PYX_ERR(0, 39, __pyx_L8_error) #endif /* "cuda/bindings/_internal/utils.pyx":34 * bufPtr = buf * else: # try buffer protocol * try: # <<<<<<<<<<<<<< * status = cpython.PyObject_GetBuffer(buf, &view, flags) * # when the caller does not provide a size, it is set to -1 at generate-time by cybind */ } /* "cuda/bindings/_internal/utils.pyx":47 * "buffer, of size bytes") from e * else: * bufPtr = view.buf # <<<<<<<<<<<<<< * finally: * if status == 0: */ /*else:*/ { __pyx_t_8 = __pyx_v_view.buf; __pyx_v_bufPtr = __pyx_t_8; } __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L13_try_end; 
__pyx_L8_error:; /* "cuda/bindings/_internal/utils.pyx":40 * assert view.len == size * assert view.ndim == 1 * except Exception as e: # <<<<<<<<<<<<<< * adj = "writable " if not readonly else "" * raise ValueError( */ __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(((PyTypeObject*)PyExc_Exception)))); if (__pyx_t_7) { __Pyx_AddTraceback("cuda.bindings._internal.utils.get_buffer_pointer", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11) < 0) __PYX_ERR(0, 40, __pyx_L10_except_error) __Pyx_XGOTREF(__pyx_t_9); __Pyx_XGOTREF(__pyx_t_10); __Pyx_XGOTREF(__pyx_t_11); __Pyx_INCREF(__pyx_t_10); __pyx_v_e = __pyx_t_10; /*try:*/ { /* "cuda/bindings/_internal/utils.pyx":41 * assert view.ndim == 1 * except Exception as e: * adj = "writable " if not readonly else "" # <<<<<<<<<<<<<< * raise ValueError( * "buf must be either a Python int representing the pointer " */ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_readonly); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(0, 41, __pyx_L20_error) __pyx_t_1 = (!__pyx_t_2); if (__pyx_t_1) { __Pyx_INCREF(__pyx_mstate_global->__pyx_kp_u_writable); __pyx_t_12 = __pyx_mstate_global->__pyx_kp_u_writable; } else { __Pyx_INCREF(__pyx_mstate_global->__pyx_kp_u_); __pyx_t_12 = __pyx_mstate_global->__pyx_kp_u_; } __pyx_v_adj = ((PyObject*)__pyx_t_12); __pyx_t_12 = 0; /* "cuda/bindings/_internal/utils.pyx":42 * except Exception as e: * adj = "writable " if not readonly else "" * raise ValueError( # <<<<<<<<<<<<<< * "buf must be either a Python int representing the pointer " * f"address to a valid buffer, or a 1D contiguous {adj}" */ __pyx_t_13 = NULL; __Pyx_INCREF(__pyx_builtin_ValueError); __pyx_t_14 = __pyx_builtin_ValueError; /* "cuda/bindings/_internal/utils.pyx":44 * raise ValueError( * "buf must be either a Python int representing the pointer " * f"address to a valid buffer, or a 1D contiguous {adj}" # <<<<<<<<<<<<<< * "buffer, of size bytes") from e * else: */ __pyx_t_15 = 
__Pyx_PyUnicode_Unicode(__pyx_v_adj); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 44, __pyx_L20_error) __Pyx_GOTREF(__pyx_t_15); __pyx_t_16[0] = __pyx_mstate_global->__pyx_kp_u_buf_must_be_either_a_Python_int; __pyx_t_16[1] = __pyx_t_15; __pyx_t_16[2] = __pyx_mstate_global->__pyx_kp_u_buffer_of_size_bytes; /* "cuda/bindings/_internal/utils.pyx":43 * adj = "writable " if not readonly else "" * raise ValueError( * "buf must be either a Python int representing the pointer " # <<<<<<<<<<<<<< * f"address to a valid buffer, or a 1D contiguous {adj}" * "buffer, of size bytes") from e */ __pyx_t_17 = __Pyx_PyUnicode_Join(__pyx_t_16, 3, 103 + __Pyx_PyUnicode_GET_LENGTH(__pyx_t_15) + 21, 127 | __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_15)); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 43, __pyx_L20_error) __Pyx_GOTREF(__pyx_t_17); __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; __pyx_t_18 = 1; { PyObject *__pyx_callargs[2] = {__pyx_t_13, __pyx_t_17}; __pyx_t_12 = __Pyx_PyObject_FastCall(__pyx_t_14, __pyx_callargs+__pyx_t_18, (2-__pyx_t_18) | (__pyx_t_18*__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)); __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 42, __pyx_L20_error) __Pyx_GOTREF(__pyx_t_12); } /* "cuda/bindings/_internal/utils.pyx":45 * "buf must be either a Python int representing the pointer " * f"address to a valid buffer, or a 1D contiguous {adj}" * "buffer, of size bytes") from e # <<<<<<<<<<<<<< * else: * bufPtr = view.buf */ __Pyx_Raise(__pyx_t_12, 0, 0, __pyx_v_e); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; __PYX_ERR(0, 42, __pyx_L20_error) } /* "cuda/bindings/_internal/utils.pyx":40 * assert view.len == size * assert view.ndim == 1 * except Exception as e: # <<<<<<<<<<<<<< * adj = "writable " if not readonly else "" * raise ValueError( */ /*finally:*/ { __pyx_L20_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_21 = 0; __pyx_t_22 = 
0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; __Pyx_ExceptionSwap(&__pyx_t_24, &__pyx_t_25, &__pyx_t_26); if ( unlikely(__Pyx_GetException(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23) < 0)) __Pyx_ErrFetch(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23); __Pyx_XGOTREF(__pyx_t_21); __Pyx_XGOTREF(__pyx_t_22); __Pyx_XGOTREF(__pyx_t_23); __Pyx_XGOTREF(__pyx_t_24); __Pyx_XGOTREF(__pyx_t_25); __Pyx_XGOTREF(__pyx_t_26); __pyx_t_7 = __pyx_lineno; __pyx_t_19 = __pyx_clineno; __pyx_t_20 = __pyx_filename; { __Pyx_DECREF(__pyx_v_e); __pyx_v_e = 0; } __Pyx_XGIVEREF(__pyx_t_24); __Pyx_XGIVEREF(__pyx_t_25); __Pyx_XGIVEREF(__pyx_t_26); __Pyx_ExceptionReset(__pyx_t_24, __pyx_t_25, __pyx_t_26); __Pyx_XGIVEREF(__pyx_t_21); __Pyx_XGIVEREF(__pyx_t_22); __Pyx_XGIVEREF(__pyx_t_23); __Pyx_ErrRestore(__pyx_t_21, __pyx_t_22, __pyx_t_23); __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; __pyx_lineno = __pyx_t_7; __pyx_clineno = __pyx_t_19; __pyx_filename = __pyx_t_20; goto __pyx_L10_except_error; } } } goto __pyx_L10_except_error; /* "cuda/bindings/_internal/utils.pyx":34 * bufPtr = buf * else: # try buffer protocol * try: # <<<<<<<<<<<<<< * status = cpython.PyObject_GetBuffer(buf, &view, flags) * # when the caller does not provide a size, it is set to -1 at generate-time by cybind */ __pyx_L10_except_error:; __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); goto __pyx_L6_error; __pyx_L13_try_end:; } } /* "cuda/bindings/_internal/utils.pyx":49 * bufPtr = view.buf * finally: * if status == 0: # <<<<<<<<<<<<<< * cpython.PyBuffer_Release(&view) * */ /*finally:*/ { /*normal exit:*/{ __pyx_t_1 = (__pyx_v_status == 0); if (__pyx_t_1) { /* 
"cuda/bindings/_internal/utils.pyx":50 * finally: * if status == 0: * cpython.PyBuffer_Release(&view) # <<<<<<<<<<<<<< * * return bufPtr */ PyBuffer_Release((&__pyx_v_view)); /* "cuda/bindings/_internal/utils.pyx":49 * bufPtr = view.buf * finally: * if status == 0: # <<<<<<<<<<<<<< * cpython.PyBuffer_Release(&view) * */ } goto __pyx_L7; } __pyx_L6_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_6 = 0; __pyx_t_5 = 0; __pyx_t_4 = 0; __pyx_t_26 = 0; __pyx_t_25 = 0; __pyx_t_24 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_ExceptionSwap(&__pyx_t_26, &__pyx_t_25, &__pyx_t_24); if ( unlikely(__Pyx_GetException(&__pyx_t_6, &__pyx_t_5, &__pyx_t_4) < 0)) __Pyx_ErrFetch(&__pyx_t_6, &__pyx_t_5, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_6); __Pyx_XGOTREF(__pyx_t_5); __Pyx_XGOTREF(__pyx_t_4); __Pyx_XGOTREF(__pyx_t_26); __Pyx_XGOTREF(__pyx_t_25); __Pyx_XGOTREF(__pyx_t_24); __pyx_t_19 = __pyx_lineno; __pyx_t_7 = __pyx_clineno; __pyx_t_27 = __pyx_filename; { __pyx_t_1 = (__pyx_v_status == 0); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":50 * finally: * if status == 0: * cpython.PyBuffer_Release(&view) # <<<<<<<<<<<<<< * * return bufPtr */ PyBuffer_Release((&__pyx_v_view)); /* "cuda/bindings/_internal/utils.pyx":49 * bufPtr = view.buf * finally: * if status == 0: # <<<<<<<<<<<<<< * cpython.PyBuffer_Release(&view) * */ } } __Pyx_XGIVEREF(__pyx_t_26); __Pyx_XGIVEREF(__pyx_t_25); __Pyx_XGIVEREF(__pyx_t_24); __Pyx_ExceptionReset(__pyx_t_26, __pyx_t_25, __pyx_t_24); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ErrRestore(__pyx_t_6, __pyx_t_5, __pyx_t_4); __pyx_t_6 = 0; __pyx_t_5 = 0; __pyx_t_4 = 0; 
__pyx_t_26 = 0; __pyx_t_25 = 0; __pyx_t_24 = 0; __pyx_lineno = __pyx_t_19; __pyx_clineno = __pyx_t_7; __pyx_filename = __pyx_t_27; goto __pyx_L1_error; } __pyx_L7:; } } __pyx_L4:; /* "cuda/bindings/_internal/utils.pyx":52 * cpython.PyBuffer_Release(&view) * * return bufPtr # <<<<<<<<<<<<<< * * */ __pyx_r = __pyx_v_bufPtr; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":22 * * * cdef void* get_buffer_pointer(buf, Py_ssize_t size, readonly=True) except*: # <<<<<<<<<<<<<< * """The caller must ensure ``buf`` is alive when the returned pointer is in use.""" * cdef void* bufPtr */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_13); __Pyx_XDECREF(__pyx_t_14); __Pyx_XDECREF(__pyx_t_15); __Pyx_XDECREF(__pyx_t_17); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_buffer_pointer", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF(__pyx_v_e); __Pyx_XDECREF(__pyx_v_adj); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "cuda/bindings/_internal/utils.pyx":57 * # Cython can't infer the ResT overload when it is wrapped in nullable_unique_ptr, * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) */ static int __pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED int *__pyx_v___unused) { std::vector *__pyx_v_vec; Py_ssize_t __pyx_v_i; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector *__pyx_t_3; Py_ssize_t __pyx_t_4; Py_ssize_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; int __pyx_t_7; intptr_t __pyx_t_8; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 
0; __Pyx_RefNannySetupContext("__pyx_fuse_0get_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":59 * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 59, __pyx_L1_error) try { __pyx_t_3 = new std::vector (__pyx_t_2); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 59, __pyx_L1_error) } __pyx_v_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":62 * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop * in_out_ptr.reset(vec, True) # <<<<<<<<<<<<<< * for i in range(len(obj)): * deref(vec)[i] = obj[i] */ __pyx_v_in_out_ptr.reset(__pyx_v_vec, 1); /* "cuda/bindings/_internal/utils.pyx":63 * # case of exception in the following loop * in_out_ptr.reset(vec, True) * for i in range(len(obj)): # <<<<<<<<<<<<<< * deref(vec)[i] = obj[i] * else: */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_t_4 = __pyx_t_2; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_i = __pyx_t_5; /* "cuda/bindings/_internal/utils.pyx":64 * in_out_ptr.reset(vec, True) * for i in range(len(obj)): * deref(vec)[i] = obj[i] # <<<<<<<<<<<<<< * else: * in_out_ptr.reset(obj, False) */ __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_obj, __pyx_v_i, Py_ssize_t, 1, PyLong_FromSsize_t, 0, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = __Pyx_PyLong_As_int(__pyx_t_6); if (unlikely((__pyx_t_7 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; ((*__pyx_v_vec)[__pyx_v_i]) = __pyx_t_7; } /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":66 * deref(vec)[i] = obj[i] * else: * in_out_ptr.reset(obj, False) # <<<<<<<<<<<<<< * return 0 * */ /*else*/ { __pyx_t_8 = PyLong_AsSsize_t(__pyx_v_obj); if 
(unlikely((__pyx_t_8 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 66, __pyx_L1_error) __pyx_v_in_out_ptr.reset(((std::vector *)((intptr_t)__pyx_t_8)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":67 * else: * in_out_ptr.reset(obj, False) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":57 * # Cython can't infer the ResT overload when it is wrapped in nullable_unique_ptr, * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_1__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED int32_t *__pyx_v___unused) { std::vector *__pyx_v_vec; Py_ssize_t __pyx_v_i; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector *__pyx_t_3; Py_ssize_t __pyx_t_4; Py_ssize_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; int32_t __pyx_t_7; intptr_t __pyx_t_8; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_1get_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { 
/* "cuda/bindings/_internal/utils.pyx":59 * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 59, __pyx_L1_error) try { __pyx_t_3 = new std::vector (__pyx_t_2); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 59, __pyx_L1_error) } __pyx_v_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":62 * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop * in_out_ptr.reset(vec, True) # <<<<<<<<<<<<<< * for i in range(len(obj)): * deref(vec)[i] = obj[i] */ __pyx_v_in_out_ptr.reset(__pyx_v_vec, 1); /* "cuda/bindings/_internal/utils.pyx":63 * # case of exception in the following loop * in_out_ptr.reset(vec, True) * for i in range(len(obj)): # <<<<<<<<<<<<<< * deref(vec)[i] = obj[i] * else: */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_t_4 = __pyx_t_2; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_i = __pyx_t_5; /* "cuda/bindings/_internal/utils.pyx":64 * in_out_ptr.reset(vec, True) * for i in range(len(obj)): * deref(vec)[i] = obj[i] # <<<<<<<<<<<<<< * else: * in_out_ptr.reset(obj, False) */ __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_obj, __pyx_v_i, Py_ssize_t, 1, PyLong_FromSsize_t, 0, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = __Pyx_PyLong_As_int32_t(__pyx_t_6); if (unlikely((__pyx_t_7 == ((int32_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; ((*__pyx_v_vec)[__pyx_v_i]) = __pyx_t_7; } /* "cuda/bindings/_internal/utils.pyx":58 * # so we 
need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":66 * deref(vec)[i] = obj[i] * else: * in_out_ptr.reset(obj, False) # <<<<<<<<<<<<<< * return 0 * */ /*else*/ { __pyx_t_8 = PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_8 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 66, __pyx_L1_error) __pyx_v_in_out_ptr.reset(((std::vector *)((intptr_t)__pyx_t_8)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":67 * else: * in_out_ptr.reset(obj, False) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":57 * # Cython can't infer the ResT overload when it is wrapped in nullable_unique_ptr, * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_2__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED int64_t *__pyx_v___unused) { std::vector *__pyx_v_vec; Py_ssize_t __pyx_v_i; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector *__pyx_t_3; Py_ssize_t __pyx_t_4; Py_ssize_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; int64_t __pyx_t_7; intptr_t __pyx_t_8; int __pyx_lineno = 0; const char 
*__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_2get_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":59 * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 59, __pyx_L1_error) try { __pyx_t_3 = new std::vector (__pyx_t_2); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 59, __pyx_L1_error) } __pyx_v_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":62 * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop * in_out_ptr.reset(vec, True) # <<<<<<<<<<<<<< * for i in range(len(obj)): * deref(vec)[i] = obj[i] */ __pyx_v_in_out_ptr.reset(__pyx_v_vec, 1); /* "cuda/bindings/_internal/utils.pyx":63 * # case of exception in the following loop * in_out_ptr.reset(vec, True) * for i in range(len(obj)): # <<<<<<<<<<<<<< * deref(vec)[i] = obj[i] * else: */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_t_4 = __pyx_t_2; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_i = __pyx_t_5; /* "cuda/bindings/_internal/utils.pyx":64 * in_out_ptr.reset(vec, True) * for i in range(len(obj)): * deref(vec)[i] = obj[i] # <<<<<<<<<<<<<< * else: * in_out_ptr.reset(obj, False) */ __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_obj, __pyx_v_i, Py_ssize_t, 1, PyLong_FromSsize_t, 0, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = __Pyx_PyLong_As_int64_t(__pyx_t_6); if (unlikely((__pyx_t_7 == ((int64_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; ((*__pyx_v_vec)[__pyx_v_i]) = __pyx_t_7; } /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":66 * deref(vec)[i] = obj[i] * else: * in_out_ptr.reset(obj, False) # <<<<<<<<<<<<<< * return 0 * */ /*else*/ { __pyx_t_8 = PyLong_AsSsize_t(__pyx_v_obj); if 
(unlikely((__pyx_t_8 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 66, __pyx_L1_error) __pyx_v_in_out_ptr.reset(((std::vector *)((intptr_t)__pyx_t_8)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":67 * else: * in_out_ptr.reset(obj, False) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":57 * # Cython can't infer the ResT overload when it is wrapped in nullable_unique_ptr, * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_3__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED char *__pyx_v___unused) { std::vector *__pyx_v_vec; Py_ssize_t __pyx_v_i; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector *__pyx_t_3; Py_ssize_t __pyx_t_4; Py_ssize_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; char __pyx_t_7; intptr_t __pyx_t_8; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_3get_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* 
"cuda/bindings/_internal/utils.pyx":59 * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 59, __pyx_L1_error) try { __pyx_t_3 = new std::vector (__pyx_t_2); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 59, __pyx_L1_error) } __pyx_v_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":62 * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop * in_out_ptr.reset(vec, True) # <<<<<<<<<<<<<< * for i in range(len(obj)): * deref(vec)[i] = obj[i] */ __pyx_v_in_out_ptr.reset(__pyx_v_vec, 1); /* "cuda/bindings/_internal/utils.pyx":63 * # case of exception in the following loop * in_out_ptr.reset(vec, True) * for i in range(len(obj)): # <<<<<<<<<<<<<< * deref(vec)[i] = obj[i] * else: */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_t_4 = __pyx_t_2; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_i = __pyx_t_5; /* "cuda/bindings/_internal/utils.pyx":64 * in_out_ptr.reset(vec, True) * for i in range(len(obj)): * deref(vec)[i] = obj[i] # <<<<<<<<<<<<<< * else: * in_out_ptr.reset(obj, False) */ __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_obj, __pyx_v_i, Py_ssize_t, 1, PyLong_FromSsize_t, 0, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = __Pyx_PyLong_As_char(__pyx_t_6); if (unlikely((__pyx_t_7 == (char)-1) && PyErr_Occurred())) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; ((*__pyx_v_vec)[__pyx_v_i]) = __pyx_t_7; } /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy 
(__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":66 * deref(vec)[i] = obj[i] * else: * in_out_ptr.reset(obj, False) # <<<<<<<<<<<<<< * return 0 * */ /*else*/ { __pyx_t_8 = PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_8 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 66, __pyx_L1_error) __pyx_v_in_out_ptr.reset(((std::vector *)((intptr_t)__pyx_t_8)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":67 * else: * in_out_ptr.reset(obj, False) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":57 * # Cython can't infer the ResT overload when it is wrapped in nullable_unique_ptr, * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_4__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED float *__pyx_v___unused) { std::vector *__pyx_v_vec; Py_ssize_t __pyx_v_i; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector *__pyx_t_3; Py_ssize_t __pyx_t_4; Py_ssize_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; float __pyx_t_7; intptr_t __pyx_t_8; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int 
__pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_4get_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":59 * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 59, __pyx_L1_error) try { __pyx_t_3 = new std::vector (__pyx_t_2); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 59, __pyx_L1_error) } __pyx_v_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":62 * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop * in_out_ptr.reset(vec, True) # <<<<<<<<<<<<<< * for i in range(len(obj)): * deref(vec)[i] = obj[i] */ __pyx_v_in_out_ptr.reset(__pyx_v_vec, 1); /* "cuda/bindings/_internal/utils.pyx":63 * # case of exception in the following loop * in_out_ptr.reset(vec, True) * for i in range(len(obj)): # <<<<<<<<<<<<<< * deref(vec)[i] = obj[i] * else: */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_t_4 = __pyx_t_2; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_i = __pyx_t_5; /* "cuda/bindings/_internal/utils.pyx":64 * in_out_ptr.reset(vec, True) * for i in range(len(obj)): * deref(vec)[i] = obj[i] # <<<<<<<<<<<<<< * else: * in_out_ptr.reset(obj, False) */ __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_obj, __pyx_v_i, Py_ssize_t, 1, PyLong_FromSsize_t, 0, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = __Pyx_PyFloat_AsFloat(__pyx_t_6); if (unlikely((__pyx_t_7 == (float)-1) && PyErr_Occurred())) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; ((*__pyx_v_vec)[__pyx_v_i]) = __pyx_t_7; } /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":66 * deref(vec)[i] = obj[i] * else: * in_out_ptr.reset(obj, False) # <<<<<<<<<<<<<< * return 0 * */ /*else*/ { __pyx_t_8 = PyLong_AsSsize_t(__pyx_v_obj); if 
(unlikely((__pyx_t_8 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 66, __pyx_L1_error) __pyx_v_in_out_ptr.reset(((std::vector *)((intptr_t)__pyx_t_8)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":67 * else: * in_out_ptr.reset(obj, False) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":57 * # Cython can't infer the ResT overload when it is wrapped in nullable_unique_ptr, * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_5__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr(nullable_unique_ptr > &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED double *__pyx_v___unused) { std::vector *__pyx_v_vec; Py_ssize_t __pyx_v_i; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector *__pyx_t_3; Py_ssize_t __pyx_t_4; Py_ssize_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; double __pyx_t_7; intptr_t __pyx_t_8; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_5get_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* 
"cuda/bindings/_internal/utils.pyx":59 * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 59, __pyx_L1_error) try { __pyx_t_3 = new std::vector (__pyx_t_2); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 59, __pyx_L1_error) } __pyx_v_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":62 * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop * in_out_ptr.reset(vec, True) # <<<<<<<<<<<<<< * for i in range(len(obj)): * deref(vec)[i] = obj[i] */ __pyx_v_in_out_ptr.reset(__pyx_v_vec, 1); /* "cuda/bindings/_internal/utils.pyx":63 * # case of exception in the following loop * in_out_ptr.reset(vec, True) * for i in range(len(obj)): # <<<<<<<<<<<<<< * deref(vec)[i] = obj[i] * else: */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_t_4 = __pyx_t_2; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_i = __pyx_t_5; /* "cuda/bindings/_internal/utils.pyx":64 * in_out_ptr.reset(vec, True) * for i in range(len(obj)): * deref(vec)[i] = obj[i] # <<<<<<<<<<<<<< * else: * in_out_ptr.reset(obj, False) */ __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_obj, __pyx_v_i, Py_ssize_t, 1, PyLong_FromSsize_t, 0, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = __Pyx_PyFloat_AsDouble(__pyx_t_6); if (unlikely((__pyx_t_7 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 64, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; ((*__pyx_v_vec)[__pyx_v_i]) = __pyx_t_7; } /* "cuda/bindings/_internal/utils.pyx":58 * # so we need a 
dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[ResT](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":66 * deref(vec)[i] = obj[i] * else: * in_out_ptr.reset(obj, False) # <<<<<<<<<<<<<< * return 0 * */ /*else*/ { __pyx_t_8 = PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_8 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 66, __pyx_L1_error) __pyx_v_in_out_ptr.reset(((std::vector *)((intptr_t)__pyx_t_8)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":67 * else: * in_out_ptr.reset(obj, False) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":57 * # Cython can't infer the ResT overload when it is wrapped in nullable_unique_ptr, * # so we need a dummy (__unused) input argument to help it * cdef int get_resource_ptr(nullable_unique_ptr[vector[ResT]] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[ResT](len(obj)) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "cuda/bindings/_internal/utils.pyx":70 * * * cdef int get_resource_ptrs(nullable_unique_ptr[ vector[PtrT*] ] &in_out_ptr, object obj, PtrT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[PtrT*](len(obj)) */ static int __pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptrs(nullable_unique_ptr > &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED void *__pyx_v___unused) { std::vector *__pyx_v_vec; Py_ssize_t __pyx_v_i; int 
__pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector *__pyx_t_3; Py_ssize_t __pyx_t_4; Py_ssize_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; intptr_t __pyx_t_7; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_0get_resource_ptrs", 0); /* "cuda/bindings/_internal/utils.pyx":71 * * cdef int get_resource_ptrs(nullable_unique_ptr[ vector[PtrT*] ] &in_out_ptr, object obj, PtrT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[PtrT*](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":72 * cdef int get_resource_ptrs(nullable_unique_ptr[ vector[PtrT*] ] &in_out_ptr, object obj, PtrT* __unused) except 1: * if cpython.PySequence_Check(obj): * vec = new vector[PtrT*](len(obj)) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 72, __pyx_L1_error) try { __pyx_t_3 = new std::vector (__pyx_t_2); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 72, __pyx_L1_error) } __pyx_v_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":75 * # set the ownership immediately to avoid leaking the `vec` memory in * # case of exception in the following loop * in_out_ptr.reset(vec, True) # <<<<<<<<<<<<<< * for i in range(len(obj)): * deref(vec)[i] = (obj[i]) */ __pyx_v_in_out_ptr.reset(__pyx_v_vec, 1); /* "cuda/bindings/_internal/utils.pyx":76 * # case of exception in the following loop * in_out_ptr.reset(vec, True) * for i in range(len(obj)): # <<<<<<<<<<<<<< * deref(vec)[i] = (obj[i]) * else: */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 76, __pyx_L1_error) __pyx_t_4 = __pyx_t_2; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_i = __pyx_t_5; /* "cuda/bindings/_internal/utils.pyx":77 * in_out_ptr.reset(vec, True) * for i in range(len(obj)): * deref(vec)[i] = (obj[i]) # <<<<<<<<<<<<<< * else: * in_out_ptr.reset(obj, False) */ __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_obj, __pyx_v_i, Py_ssize_t, 1, PyLong_FromSsize_t, 0, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 77, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = PyLong_AsSsize_t(__pyx_t_6); if (unlikely((__pyx_t_7 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 77, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; ((*__pyx_v_vec)[__pyx_v_i]) = ((void *)((intptr_t)__pyx_t_7)); } /* "cuda/bindings/_internal/utils.pyx":71 * * cdef int get_resource_ptrs(nullable_unique_ptr[ vector[PtrT*] ] &in_out_ptr, object obj, PtrT* __unused) except 1: * if cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * vec = new vector[PtrT*](len(obj)) * # set the ownership immediately to avoid leaking the `vec` memory in */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":79 * deref(vec)[i] = (obj[i]) * else: * in_out_ptr.reset(obj, False) # <<<<<<<<<<<<<< * return 0 * */ /*else*/ { __pyx_t_7 = PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_7 == 
((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 79, __pyx_L1_error) __pyx_v_in_out_ptr.reset(((std::vector *)((intptr_t)__pyx_t_7)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":80 * else: * in_out_ptr.reset(obj, False) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":70 * * * cdef int get_resource_ptrs(nullable_unique_ptr[ vector[PtrT*] ] &in_out_ptr, object obj, PtrT* __unused) except 1: # <<<<<<<<<<<<<< * if cpython.PySequence_Check(obj): * vec = new vector[PtrT*](len(obj)) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_resource_ptrs", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "cuda/bindings/_internal/utils.pyx":83 * * * cdef int get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr */ static int __pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED int *__pyx_v___unused) { nullable_unique_ptr > __pyx_v_nested_ptr; nullable_unique_ptr > > __pyx_v_nested_res_ptr; std::vector *__pyx_v_nested_vec; std::vector > *__pyx_v_nested_res_vec; size_t __pyx_v_i; size_t __pyx_v_length; intptr_t __pyx_v_addr; PyObject *__pyx_v_obj_i = NULL; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector > *__pyx_t_3; std::vector *__pyx_t_4; size_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *(*__pyx_t_7)(PyObject *); PyObject *__pyx_t_8 = NULL; std::vector __pyx_t_9; intptr_t __pyx_t_10; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; 
__Pyx_RefNannySetupContext("__pyx_fuse_0get_nested_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":86 * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL # <<<<<<<<<<<<<< * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 */ __pyx_v_nested_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":87 * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL # <<<<<<<<<<<<<< * cdef size_t i = 0, length = 0 * cdef intptr_t addr */ __pyx_v_nested_res_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":88 * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 # <<<<<<<<<<<<<< * cdef intptr_t addr * */ __pyx_v_i = 0; __pyx_v_length = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ __pyx_t_1 = __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(__pyx_v_obj); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 91, __pyx_L1_error) if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":92 * * if is_nested_sequence(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 92, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":93 * if is_nested_sequence(obj): * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * # set the ownership immediately to avoid leaking memory in case of */ try { __pyx_t_3 = new 
std::vector > (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 93, __pyx_L1_error) } __pyx_v_nested_res_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":94 * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 94, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":97 * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): */ __pyx_v_nested_res_ptr.reset(__pyx_v_nested_res_vec, 1); /* "cuda/bindings/_internal/utils.pyx":98 * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, obj_i in enumerate(obj): * if ResT is char: */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 99, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? 
PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 99, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 99, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 99, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __Pyx_XDECREF_SET(__pyx_v_obj_i, __pyx_t_8); __pyx_t_8 = 0; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":108 * deref(nested_res_vec)[i].assign(obj_i_ptr, obj_i_ptr + str_len) * else: * deref(nested_res_vec)[i] = obj_i # <<<<<<<<<<<<<< * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): */ __pyx_t_9 = __pyx_convert_vector_from_py_int(__pyx_v_obj_i); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 108, __pyx_L1_error) ((*__pyx_v_nested_res_vec)[__pyx_v_i]) = __pyx_t_9; /* "cuda/bindings/_internal/utils.pyx":109 * else: * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) # <<<<<<<<<<<<<< * elif 
cpython.PySequence_Check(obj): * length = len(obj) */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = ((intptr_t)((*__pyx_v_nested_res_vec)[__pyx_v_i]).data()); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":111 * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 111, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":112 * elif cpython.PySequence_Check(obj): * length = len(obj) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 112, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":113 * length = len(obj) * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 114, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 114, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if 
(unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 114, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __pyx_t_10 = PyLong_AsSsize_t(__pyx_t_8); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_v_addr = __pyx_t_10; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":115 * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr # <<<<<<<<<<<<<< * nested_res_ptr.reset(NULL, False) * else: */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = __pyx_v_addr; /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":116 * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * else: * # obj is an int (ResT**) */ __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":119 * else: * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * nested_ptr.reset(obj, False) * */ /*else*/ { __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":120 * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) * nested_ptr.reset(obj, False) # <<<<<<<<<<<<<< * * in_out_ptr.ptrs = move(nested_ptr) */ __pyx_t_10 = 
PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 120, __pyx_L1_error) __pyx_v_nested_ptr.reset(((std::vector *)((intptr_t)__pyx_t_10)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":122 * nested_ptr.reset(obj, False) * * in_out_ptr.ptrs = move(nested_ptr) # <<<<<<<<<<<<<< * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 */ __pyx_v_in_out_ptr.ptrs = cython_std::move > >(__pyx_v_nested_ptr); /* "cuda/bindings/_internal/utils.pyx":123 * * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) # <<<<<<<<<<<<<< * return 0 * */ __pyx_v_in_out_ptr.nested_resource_ptr = cython_std::move > > >(__pyx_v_nested_res_ptr); /* "cuda/bindings/_internal/utils.pyx":124 * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":83 * * * cdef int get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_nested_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_obj_i); __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_1__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED int32_t *__pyx_v___unused) { nullable_unique_ptr > __pyx_v_nested_ptr; nullable_unique_ptr > > __pyx_v_nested_res_ptr; std::vector *__pyx_v_nested_vec; std::vector > *__pyx_v_nested_res_vec; size_t __pyx_v_i; size_t __pyx_v_length; 
intptr_t __pyx_v_addr; PyObject *__pyx_v_obj_i = NULL; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector > *__pyx_t_3; std::vector *__pyx_t_4; size_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *(*__pyx_t_7)(PyObject *); PyObject *__pyx_t_8 = NULL; std::vector __pyx_t_9; intptr_t __pyx_t_10; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_1get_nested_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":86 * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL # <<<<<<<<<<<<<< * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 */ __pyx_v_nested_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":87 * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL # <<<<<<<<<<<<<< * cdef size_t i = 0, length = 0 * cdef intptr_t addr */ __pyx_v_nested_res_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":88 * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 # <<<<<<<<<<<<<< * cdef intptr_t addr * */ __pyx_v_i = 0; __pyx_v_length = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ __pyx_t_1 = __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(__pyx_v_obj); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 91, __pyx_L1_error) if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":92 * * if is_nested_sequence(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) */ __pyx_t_2 = 
PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 92, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":93 * if is_nested_sequence(obj): * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * # set the ownership immediately to avoid leaking memory in case of */ try { __pyx_t_3 = new std::vector > (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 93, __pyx_L1_error) } __pyx_v_nested_res_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":94 * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 94, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":97 * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): */ __pyx_v_nested_res_ptr.reset(__pyx_v_nested_res_vec, 1); /* "cuda/bindings/_internal/utils.pyx":98 * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, obj_i in enumerate(obj): * if ResT is char: */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; 
__Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 99, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 99, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 99, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 99, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __Pyx_XDECREF_SET(__pyx_v_obj_i, __pyx_t_8); __pyx_t_8 = 0; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":108 * deref(nested_res_vec)[i].assign(obj_i_ptr, obj_i_ptr + str_len) * else: * deref(nested_res_vec)[i] = obj_i # <<<<<<<<<<<<<< * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): */ __pyx_t_9 = __pyx_convert_vector_from_py_int32_t(__pyx_v_obj_i); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 108, 
__pyx_L1_error) ((*__pyx_v_nested_res_vec)[__pyx_v_i]) = __pyx_t_9; /* "cuda/bindings/_internal/utils.pyx":109 * else: * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) # <<<<<<<<<<<<<< * elif cpython.PySequence_Check(obj): * length = len(obj) */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = ((intptr_t)((*__pyx_v_nested_res_vec)[__pyx_v_i]).data()); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":111 * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 111, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":112 * elif cpython.PySequence_Check(obj): * length = len(obj) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 112, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":113 * length = len(obj) * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 114, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 114, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if 
(unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 114, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __pyx_t_10 = PyLong_AsSsize_t(__pyx_t_8); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_v_addr = __pyx_t_10; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":115 * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr # <<<<<<<<<<<<<< * nested_res_ptr.reset(NULL, False) * else: */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = __pyx_v_addr; /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":116 * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * else: * # obj is an int (ResT**) */ __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":119 * else: * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * nested_ptr.reset(obj, False) * */ /*else*/ { __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":120 * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) * nested_ptr.reset(obj, False) # <<<<<<<<<<<<<< * * in_out_ptr.ptrs = move(nested_ptr) */ __pyx_t_10 = 
PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 120, __pyx_L1_error) __pyx_v_nested_ptr.reset(((std::vector *)((intptr_t)__pyx_t_10)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":122 * nested_ptr.reset(obj, False) * * in_out_ptr.ptrs = move(nested_ptr) # <<<<<<<<<<<<<< * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 */ __pyx_v_in_out_ptr.ptrs = cython_std::move > >(__pyx_v_nested_ptr); /* "cuda/bindings/_internal/utils.pyx":123 * * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) # <<<<<<<<<<<<<< * return 0 * */ __pyx_v_in_out_ptr.nested_resource_ptr = ((nullable_unique_ptr > > )cython_std::move > > >(__pyx_v_nested_res_ptr)); /* "cuda/bindings/_internal/utils.pyx":124 * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":83 * * * cdef int get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_nested_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_obj_i); __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_2__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED int64_t *__pyx_v___unused) { nullable_unique_ptr > __pyx_v_nested_ptr; nullable_unique_ptr > > __pyx_v_nested_res_ptr; std::vector *__pyx_v_nested_vec; std::vector > *__pyx_v_nested_res_vec; size_t __pyx_v_i; 
size_t __pyx_v_length; intptr_t __pyx_v_addr; PyObject *__pyx_v_obj_i = NULL; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector > *__pyx_t_3; std::vector *__pyx_t_4; size_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *(*__pyx_t_7)(PyObject *); PyObject *__pyx_t_8 = NULL; std::vector __pyx_t_9; intptr_t __pyx_t_10; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_2get_nested_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":86 * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL # <<<<<<<<<<<<<< * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 */ __pyx_v_nested_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":87 * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL # <<<<<<<<<<<<<< * cdef size_t i = 0, length = 0 * cdef intptr_t addr */ __pyx_v_nested_res_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":88 * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 # <<<<<<<<<<<<<< * cdef intptr_t addr * */ __pyx_v_i = 0; __pyx_v_length = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ __pyx_t_1 = __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(__pyx_v_obj); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 91, __pyx_L1_error) if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":92 * * if is_nested_sequence(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) */ 
__pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 92, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":93 * if is_nested_sequence(obj): * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * # set the ownership immediately to avoid leaking memory in case of */ try { __pyx_t_3 = new std::vector > (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 93, __pyx_L1_error) } __pyx_v_nested_res_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":94 * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 94, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":97 * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): */ __pyx_v_nested_res_ptr.reset(__pyx_v_nested_res_vec, 1); /* "cuda/bindings/_internal/utils.pyx":98 * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, obj_i in enumerate(obj): * if ResT is char: */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = 
__pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 99, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 99, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 99, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 99, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __Pyx_XDECREF_SET(__pyx_v_obj_i, __pyx_t_8); __pyx_t_8 = 0; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":108 * deref(nested_res_vec)[i].assign(obj_i_ptr, obj_i_ptr + str_len) * else: * deref(nested_res_vec)[i] = obj_i # <<<<<<<<<<<<<< * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): */ __pyx_t_9 = __pyx_convert_vector_from_py_int64_t(__pyx_v_obj_i); if (unlikely(PyErr_Occurred())) 
__PYX_ERR(0, 108, __pyx_L1_error) ((*__pyx_v_nested_res_vec)[__pyx_v_i]) = __pyx_t_9; /* "cuda/bindings/_internal/utils.pyx":109 * else: * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) # <<<<<<<<<<<<<< * elif cpython.PySequence_Check(obj): * length = len(obj) */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = ((intptr_t)((*__pyx_v_nested_res_vec)[__pyx_v_i]).data()); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":111 * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 111, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":112 * elif cpython.PySequence_Check(obj): * length = len(obj) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 112, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":113 * length = len(obj) * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 114, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 114, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if 
(unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 114, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __pyx_t_10 = PyLong_AsSsize_t(__pyx_t_8); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_v_addr = __pyx_t_10; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":115 * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr # <<<<<<<<<<<<<< * nested_res_ptr.reset(NULL, False) * else: */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = __pyx_v_addr; /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":116 * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * else: * # obj is an int (ResT**) */ __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":119 * else: * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * nested_ptr.reset(obj, False) * */ /*else*/ { __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":120 * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) * nested_ptr.reset(obj, False) # <<<<<<<<<<<<<< * * in_out_ptr.ptrs = move(nested_ptr) */ __pyx_t_10 = 
PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 120, __pyx_L1_error) __pyx_v_nested_ptr.reset(((std::vector *)((intptr_t)__pyx_t_10)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":122 * nested_ptr.reset(obj, False) * * in_out_ptr.ptrs = move(nested_ptr) # <<<<<<<<<<<<<< * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 */ __pyx_v_in_out_ptr.ptrs = cython_std::move > >(__pyx_v_nested_ptr); /* "cuda/bindings/_internal/utils.pyx":123 * * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) # <<<<<<<<<<<<<< * return 0 * */ __pyx_v_in_out_ptr.nested_resource_ptr = ((nullable_unique_ptr > > )cython_std::move > > >(__pyx_v_nested_res_ptr)); /* "cuda/bindings/_internal/utils.pyx":124 * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":83 * * * cdef int get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_nested_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_obj_i); __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_3__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED char *__pyx_v___unused) { nullable_unique_ptr > __pyx_v_nested_ptr; nullable_unique_ptr > > __pyx_v_nested_res_ptr; std::vector *__pyx_v_nested_vec; std::vector > *__pyx_v_nested_res_vec; size_t __pyx_v_i; 
size_t __pyx_v_length; intptr_t __pyx_v_addr; PyObject *__pyx_v_obj_i = NULL; PyObject *__pyx_v_obj_i_bytes = NULL; size_t __pyx_v_str_len; char *__pyx_v_obj_i_ptr; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector > *__pyx_t_3; std::vector *__pyx_t_4; size_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *(*__pyx_t_7)(PyObject *); PyObject *__pyx_t_8 = NULL; Py_ssize_t __pyx_t_9; char *__pyx_t_10; intptr_t __pyx_t_11; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_3get_nested_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":86 * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL # <<<<<<<<<<<<<< * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 */ __pyx_v_nested_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":87 * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL # <<<<<<<<<<<<<< * cdef size_t i = 0, length = 0 * cdef intptr_t addr */ __pyx_v_nested_res_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":88 * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 # <<<<<<<<<<<<<< * cdef intptr_t addr * */ __pyx_v_i = 0; __pyx_v_length = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ __pyx_t_1 = __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(__pyx_v_obj); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 91, __pyx_L1_error) if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":92 * * if is_nested_sequence(obj): * length = len(obj) # <<<<<<<<<<<<<< * 
nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 92, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":93 * if is_nested_sequence(obj): * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * # set the ownership immediately to avoid leaking memory in case of */ try { __pyx_t_3 = new std::vector > (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 93, __pyx_L1_error) } __pyx_v_nested_res_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":94 * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 94, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":97 * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): */ __pyx_v_nested_res_ptr.reset(__pyx_v_nested_res_vec, 1); /* "cuda/bindings/_internal/utils.pyx":98 * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, obj_i in enumerate(obj): * if ResT is char: */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 99, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? 
PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 99, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 99, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 99, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __Pyx_XDECREF_SET(__pyx_v_obj_i, __pyx_t_8); __pyx_t_8 = 0; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":101 * for i, obj_i in enumerate(obj): * if ResT is char: * obj_i_bytes = ((obj_i)).encode() # <<<<<<<<<<<<<< * str_len = (len(obj_i_bytes)) + 1 # including null termination * deref(nested_res_vec)[i].resize(str_len) */ if (!(likely(PyUnicode_CheckExact(__pyx_v_obj_i)) || __Pyx_RaiseUnexpectedTypeError("str", __pyx_v_obj_i))) __PYX_ERR(0, 101, __pyx_L1_error) if (unlikely(__pyx_v_obj_i == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "encode"); __PYX_ERR(0, 101, __pyx_L1_error) } __pyx_t_8 = PyUnicode_AsEncodedString(((PyObject*)__pyx_v_obj_i), NULL, 
NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 101, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_XDECREF_SET(__pyx_v_obj_i_bytes, ((PyObject*)__pyx_t_8)); __pyx_t_8 = 0; /* "cuda/bindings/_internal/utils.pyx":102 * if ResT is char: * obj_i_bytes = ((obj_i)).encode() * str_len = (len(obj_i_bytes)) + 1 # including null termination # <<<<<<<<<<<<<< * deref(nested_res_vec)[i].resize(str_len) * obj_i_ptr = (obj_i_bytes) */ __pyx_t_9 = __Pyx_PyBytes_GET_SIZE(__pyx_v_obj_i_bytes); if (unlikely(__pyx_t_9 == ((Py_ssize_t)-1))) __PYX_ERR(0, 102, __pyx_L1_error) __pyx_v_str_len = (((size_t)__pyx_t_9) + 1); /* "cuda/bindings/_internal/utils.pyx":103 * obj_i_bytes = ((obj_i)).encode() * str_len = (len(obj_i_bytes)) + 1 # including null termination * deref(nested_res_vec)[i].resize(str_len) # <<<<<<<<<<<<<< * obj_i_ptr = (obj_i_bytes) * # cast to size_t explicitly to work around a potentially Cython bug */ try { ((*__pyx_v_nested_res_vec)[__pyx_v_i]).resize(__pyx_v_str_len); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 103, __pyx_L1_error) } /* "cuda/bindings/_internal/utils.pyx":104 * str_len = (len(obj_i_bytes)) + 1 # including null termination * deref(nested_res_vec)[i].resize(str_len) * obj_i_ptr = (obj_i_bytes) # <<<<<<<<<<<<<< * # cast to size_t explicitly to work around a potentially Cython bug * deref(nested_res_vec)[i].assign(obj_i_ptr, obj_i_ptr + str_len) */ __pyx_t_10 = __Pyx_PyBytes_AsWritableString(__pyx_v_obj_i_bytes); if (unlikely((!__pyx_t_10) && PyErr_Occurred())) __PYX_ERR(0, 104, __pyx_L1_error) __pyx_v_obj_i_ptr = ((char *)__pyx_t_10); /* "cuda/bindings/_internal/utils.pyx":106 * obj_i_ptr = (obj_i_bytes) * # cast to size_t explicitly to work around a potentially Cython bug * deref(nested_res_vec)[i].assign(obj_i_ptr, obj_i_ptr + str_len) # <<<<<<<<<<<<<< * else: * deref(nested_res_vec)[i] = obj_i */ try { ((*__pyx_v_nested_res_vec)[__pyx_v_i]).assign(__pyx_v_obj_i_ptr, (__pyx_v_obj_i_ptr + ((size_t)__pyx_v_str_len))); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 106, __pyx_L1_error) } /* "cuda/bindings/_internal/utils.pyx":109 * else: * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) # <<<<<<<<<<<<<< * elif cpython.PySequence_Check(obj): * length = len(obj) */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = ((intptr_t)((*__pyx_v_nested_res_vec)[__pyx_v_i]).data()); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":111 * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 111, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":112 * elif cpython.PySequence_Check(obj): * length = len(obj) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 112, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":113 * length = len(obj) * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 114, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 114, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if 
(unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 114, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __pyx_t_11 = PyLong_AsSsize_t(__pyx_t_8); if (unlikely((__pyx_t_11 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_v_addr = __pyx_t_11; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":115 * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr # <<<<<<<<<<<<<< * nested_res_ptr.reset(NULL, False) * else: */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = __pyx_v_addr; /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":116 * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * else: * # obj is an int (ResT**) */ __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":119 * else: * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * nested_ptr.reset(obj, False) * */ /*else*/ { __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":120 * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) * nested_ptr.reset(obj, False) # <<<<<<<<<<<<<< * * in_out_ptr.ptrs = move(nested_ptr) */ __pyx_t_11 = 
PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_11 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 120, __pyx_L1_error) __pyx_v_nested_ptr.reset(((std::vector *)((intptr_t)__pyx_t_11)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":122 * nested_ptr.reset(obj, False) * * in_out_ptr.ptrs = move(nested_ptr) # <<<<<<<<<<<<<< * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 */ __pyx_v_in_out_ptr.ptrs = cython_std::move > >(__pyx_v_nested_ptr); /* "cuda/bindings/_internal/utils.pyx":123 * * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) # <<<<<<<<<<<<<< * return 0 * */ __pyx_v_in_out_ptr.nested_resource_ptr = cython_std::move > > >(__pyx_v_nested_res_ptr); /* "cuda/bindings/_internal/utils.pyx":124 * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":83 * * * cdef int get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_nested_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_obj_i); __Pyx_XDECREF(__pyx_v_obj_i_bytes); __Pyx_RefNannyFinishContext(); return __pyx_r; }
/*
 * NOTE(review): machine-generated code (Cython 3.1.5). Any fix belongs in the
 * source cuda/bindings/_internal/utils.pyx (lines echoed in the comments below);
 * edits made here will be lost on regeneration.
 * NOTE(review): template arguments appear stripped in this extract (e.g.
 * "std::vector >", "nullable_unique_ptr >") -- presumably an artifact of how
 * the text was captured, not of the real generated file; verify against the
 * original .cpp before relying on the exact types shown.
 *
 * __pyx_fuse_4...get_nested_resource_ptr: fused-type specialization for
 * ResT = float of `get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr,
 * object obj, ResT* __unused) except 1` (utils.pyx:83). Builds pointer tables
 * from a Python object and moves them into `in_out_ptr`, handling three input
 * shapes (matching the three branches below):
 *   1. nested sequence (sequence of float sequences): copies each inner
 *      sequence into a freshly allocated vector-of-vectors, and records each
 *      inner vector's .data() pointer (cast to intptr_t) in a parallel pointer
 *      vector; both are handed to the nullable_unique_ptr holders with the
 *      second reset() argument = 1 (per the .pyx comment, ownership is set
 *      immediately so an exception mid-loop cannot leak the allocations).
 *   2. flat sequence: each element is read as an integer via PyLong_AsSsize_t
 *      and stored as an address in the pointer vector; no nested storage kept.
 *   3. anything else: obj itself is read as one integer and reinterpreted as a
 *      vector pointer with ownership flag 0 (caller retains ownership).
 * Returns 0 on success; on any Python error jumps to __pyx_L1_error, records a
 * traceback, and returns 1 (the `except 1` convention).
 */
static int __pyx_fuse_4__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED float *__pyx_v___unused) { nullable_unique_ptr > __pyx_v_nested_ptr; nullable_unique_ptr > > __pyx_v_nested_res_ptr; std::vector *__pyx_v_nested_vec; std::vector > *__pyx_v_nested_res_vec; size_t
__pyx_v_i; size_t __pyx_v_length; intptr_t __pyx_v_addr; PyObject *__pyx_v_obj_i = NULL; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector > *__pyx_t_3; std::vector *__pyx_t_4; size_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *(*__pyx_t_7)(PyObject *); PyObject *__pyx_t_8 = NULL; std::vector __pyx_t_9; intptr_t __pyx_t_10; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_4get_nested_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":86 * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL # <<<<<<<<<<<<<< * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 */ __pyx_v_nested_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":87 * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL # <<<<<<<<<<<<<< * cdef size_t i = 0, length = 0 * cdef intptr_t addr */ __pyx_v_nested_res_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":88 * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 # <<<<<<<<<<<<<< * cdef intptr_t addr * */ __pyx_v_i = 0; __pyx_v_length = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */
/* Case 1 dispatch: is_nested_sequence may signal a Python error by returning -1 with an exception set. */
__pyx_t_1 = __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(__pyx_v_obj); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 91, __pyx_L1_error) if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":92 * * if is_nested_sequence(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length)
*/ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 92, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":93 * if is_nested_sequence(obj): * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * # set the ownership immediately to avoid leaking memory in case of */ try { __pyx_t_3 = new std::vector > (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 93, __pyx_L1_error) } __pyx_v_nested_res_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":94 * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 94, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":97 * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): */ __pyx_v_nested_res_ptr.reset(__pyx_v_nested_res_vec, 1); /* "cuda/bindings/_internal/utils.pyx":98 * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, obj_i in enumerate(obj): * if ResT is char: */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 =
__pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 99, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 99, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 99, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 99, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __Pyx_XDECREF_SET(__pyx_v_obj_i, __pyx_t_8); __pyx_t_8 = 0; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":108 * deref(nested_res_vec)[i].assign(obj_i_ptr, obj_i_ptr + str_len) * else: * deref(nested_res_vec)[i] = obj_i # <<<<<<<<<<<<<< * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): */
/* ResT == float here, so the generic assignment in the .pyx lowers to a full Python-sequence -> C++ vector conversion of the inner sequence. */
__pyx_t_9 = __pyx_convert_vector_from_py_float(__pyx_v_obj_i); if (unlikely(PyErr_Occurred()))
__PYX_ERR(0, 108, __pyx_L1_error) ((*__pyx_v_nested_res_vec)[__pyx_v_i]) = __pyx_t_9; /* "cuda/bindings/_internal/utils.pyx":109 * else: * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) # <<<<<<<<<<<<<< * elif cpython.PySequence_Check(obj): * length = len(obj) */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = ((intptr_t)((*__pyx_v_nested_res_vec)[__pyx_v_i]).data()); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */
/* Case 2: flat sequence whose elements are integer addresses. */
__pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":111 * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 111, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":112 * elif cpython.PySequence_Check(obj): * length = len(obj) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...)
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 112, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":113 * length = len(obj) * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 114, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 114, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if
(unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 114, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __pyx_t_10 = PyLong_AsSsize_t(__pyx_t_8); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_v_addr = __pyx_t_10; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":115 * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr # <<<<<<<<<<<<<< * nested_res_ptr.reset(NULL, False) * else: */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = __pyx_v_addr; /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":116 * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * else: * # obj is an int (ResT**) */ __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":119 * else: * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * nested_ptr.reset(obj, False) * */
/* Case 3: obj itself is an integer address (ResT** per the .pyx comment); wrap it without taking ownership. */
/*else*/ { __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":120 * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) * nested_ptr.reset(obj, False) # <<<<<<<<<<<<<< * * in_out_ptr.ptrs = move(nested_ptr) */ __pyx_t_10 =
PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 120, __pyx_L1_error) __pyx_v_nested_ptr.reset(((std::vector *)((intptr_t)__pyx_t_10)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":122 * nested_ptr.reset(obj, False) * * in_out_ptr.ptrs = move(nested_ptr) # <<<<<<<<<<<<<< * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 */ __pyx_v_in_out_ptr.ptrs = cython_std::move > >(__pyx_v_nested_ptr); /* "cuda/bindings/_internal/utils.pyx":123 * * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) # <<<<<<<<<<<<<< * return 0 * */ __pyx_v_in_out_ptr.nested_resource_ptr = cython_std::move > > >(__pyx_v_nested_res_ptr); /* "cuda/bindings/_internal/utils.pyx":124 * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":83 * * * cdef int get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_nested_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_obj_i); __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_fuse_5__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr(__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &__pyx_v_in_out_ptr, PyObject *__pyx_v_obj, CYTHON_UNUSED double *__pyx_v___unused) { nullable_unique_ptr > __pyx_v_nested_ptr; nullable_unique_ptr > > __pyx_v_nested_res_ptr; std::vector *__pyx_v_nested_vec; std::vector > *__pyx_v_nested_res_vec; size_t __pyx_v_i; size_t __pyx_v_length;
intptr_t __pyx_v_addr; PyObject *__pyx_v_obj_i = NULL; int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; Py_ssize_t __pyx_t_2; std::vector > *__pyx_t_3; std::vector *__pyx_t_4; size_t __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *(*__pyx_t_7)(PyObject *); PyObject *__pyx_t_8 = NULL; std::vector __pyx_t_9; intptr_t __pyx_t_10; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__pyx_fuse_5get_nested_resource_ptr", 0); /* "cuda/bindings/_internal/utils.pyx":86 * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL # <<<<<<<<<<<<<< * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 */ __pyx_v_nested_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":87 * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL # <<<<<<<<<<<<<< * cdef size_t i = 0, length = 0 * cdef intptr_t addr */ __pyx_v_nested_res_vec = NULL; /* "cuda/bindings/_internal/utils.pyx":88 * cdef vector[intptr_t]* nested_vec = NULL * cdef vector[vector[ResT]]* nested_res_vec = NULL * cdef size_t i = 0, length = 0 # <<<<<<<<<<<<<< * cdef intptr_t addr * */ __pyx_v_i = 0; __pyx_v_length = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ __pyx_t_1 = __pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence(__pyx_v_obj); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 91, __pyx_L1_error) if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":92 * * if is_nested_sequence(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) */ __pyx_t_2 = 
PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 92, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":93 * if is_nested_sequence(obj): * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * # set the ownership immediately to avoid leaking memory in case of */ try { __pyx_t_3 = new std::vector > (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 93, __pyx_L1_error) } __pyx_v_nested_res_vec = __pyx_t_3; /* "cuda/bindings/_internal/utils.pyx":94 * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 94, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":97 * # set the ownership immediately to avoid leaking memory in case of * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): */ __pyx_v_nested_res_ptr.reset(__pyx_v_nested_res_vec, 1); /* "cuda/bindings/_internal/utils.pyx":98 * # exception in the following loop * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, obj_i in enumerate(obj): * if ResT is char: */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; 
__Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 99, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 99, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 99, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 99, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 99, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __Pyx_XDECREF_SET(__pyx_v_obj_i, __pyx_t_8); __pyx_t_8 = 0; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":108 * deref(nested_res_vec)[i].assign(obj_i_ptr, obj_i_ptr + str_len) * else: * deref(nested_res_vec)[i] = obj_i # <<<<<<<<<<<<<< * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): */ __pyx_t_9 = __pyx_convert_vector_from_py_double(__pyx_v_obj_i); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 108, 
__pyx_L1_error) ((*__pyx_v_nested_res_vec)[__pyx_v_i]) = __pyx_t_9; /* "cuda/bindings/_internal/utils.pyx":109 * else: * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) # <<<<<<<<<<<<<< * elif cpython.PySequence_Check(obj): * length = len(obj) */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = ((intptr_t)((*__pyx_v_nested_res_vec)[__pyx_v_i]).data()); /* "cuda/bindings/_internal/utils.pyx":99 * nested_res_ptr.reset(nested_res_vec, True) * nested_ptr.reset(nested_vec, True) * for i, obj_i in enumerate(obj): # <<<<<<<<<<<<<< * if ResT is char: * obj_i_bytes = ((obj_i)).encode() */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":91 * cdef intptr_t addr * * if is_nested_sequence(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_res_vec = new vector[vector[ResT]](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ __pyx_t_1 = PySequence_Check(__pyx_v_obj); if (__pyx_t_1) { /* "cuda/bindings/_internal/utils.pyx":111 * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): * length = len(obj) # <<<<<<<<<<<<<< * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) */ __pyx_t_2 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 111, __pyx_L1_error) __pyx_v_length = __pyx_t_2; /* "cuda/bindings/_internal/utils.pyx":112 * elif cpython.PySequence_Check(obj): * length = len(obj) * nested_vec = new vector[intptr_t](length) # <<<<<<<<<<<<<< * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): */ try { __pyx_t_4 = new std::vector (__pyx_v_length); } catch(...) 
{ __Pyx_CppExn2PyErr(); __PYX_ERR(0, 112, __pyx_L1_error) } __pyx_v_nested_vec = __pyx_t_4; /* "cuda/bindings/_internal/utils.pyx":113 * length = len(obj) * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) # <<<<<<<<<<<<<< * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr */ __pyx_v_nested_ptr.reset(__pyx_v_nested_vec, 1); /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ __pyx_t_5 = 0; if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_6 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; __pyx_t_7 = NULL; } else { __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = (CYTHON_COMPILING_IN_LIMITED_API) ? PyIter_Next : __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 114, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_7)) { if (likely(PyList_CheckExact(__pyx_t_6))) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } __pyx_t_8 = __Pyx_PyList_GetItemRef(__pyx_t_6, __pyx_t_2); ++__pyx_t_2; } else { { Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_6); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 114, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_8 = __Pyx_NewRef(PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2)); #else __pyx_t_8 = __Pyx_PySequence_ITEM(__pyx_t_6, __pyx_t_2); #endif ++__pyx_t_2; } if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 114, __pyx_L1_error) } else { __pyx_t_8 = __pyx_t_7(__pyx_t_6); if 
(unlikely(!__pyx_t_8)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) __PYX_ERR(0, 114, __pyx_L1_error) PyErr_Clear(); } break; } } __Pyx_GOTREF(__pyx_t_8); __pyx_t_10 = PyLong_AsSsize_t(__pyx_t_8); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_v_addr = __pyx_t_10; __pyx_v_i = __pyx_t_5; __pyx_t_5 = (__pyx_t_5 + 1); /* "cuda/bindings/_internal/utils.pyx":115 * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr # <<<<<<<<<<<<<< * nested_res_ptr.reset(NULL, False) * else: */ ((*__pyx_v_nested_vec)[__pyx_v_i]) = __pyx_v_addr; /* "cuda/bindings/_internal/utils.pyx":114 * nested_vec = new vector[intptr_t](length) * nested_ptr.reset(nested_vec, True) * for i, addr in enumerate(obj): # <<<<<<<<<<<<<< * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) */ } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "cuda/bindings/_internal/utils.pyx":116 * for i, addr in enumerate(obj): * deref(nested_vec)[i] = addr * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * else: * # obj is an int (ResT**) */ __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":110 * deref(nested_res_vec)[i] = obj_i * deref(nested_vec)[i] = (deref(nested_res_vec)[i].data()) * elif cpython.PySequence_Check(obj): # <<<<<<<<<<<<<< * length = len(obj) * nested_vec = new vector[intptr_t](length) */ goto __pyx_L3; } /* "cuda/bindings/_internal/utils.pyx":119 * else: * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) # <<<<<<<<<<<<<< * nested_ptr.reset(obj, False) * */ /*else*/ { __pyx_v_nested_res_ptr.reset(NULL, 0); /* "cuda/bindings/_internal/utils.pyx":120 * # obj is an int (ResT**) * nested_res_ptr.reset(NULL, False) * nested_ptr.reset(obj, False) # <<<<<<<<<<<<<< * * in_out_ptr.ptrs = move(nested_ptr) */ __pyx_t_10 = 
PyLong_AsSsize_t(__pyx_v_obj); if (unlikely((__pyx_t_10 == ((intptr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 120, __pyx_L1_error) __pyx_v_nested_ptr.reset(((std::vector *)((intptr_t)__pyx_t_10)), 0); } __pyx_L3:; /* "cuda/bindings/_internal/utils.pyx":122 * nested_ptr.reset(obj, False) * * in_out_ptr.ptrs = move(nested_ptr) # <<<<<<<<<<<<<< * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 */ __pyx_v_in_out_ptr.ptrs = cython_std::move > >(__pyx_v_nested_ptr); /* "cuda/bindings/_internal/utils.pyx":123 * * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) # <<<<<<<<<<<<<< * return 0 * */ __pyx_v_in_out_ptr.nested_resource_ptr = cython_std::move > > >(__pyx_v_nested_res_ptr); /* "cuda/bindings/_internal/utils.pyx":124 * in_out_ptr.ptrs = move(nested_ptr) * in_out_ptr.nested_resource_ptr = move(nested_res_ptr) * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "cuda/bindings/_internal/utils.pyx":83 * * * cdef int get_nested_resource_ptr(nested_resource[ResT] &in_out_ptr, object obj, ResT* __unused) except 1: # <<<<<<<<<<<<<< * cdef nullable_unique_ptr[ vector[intptr_t] ] nested_ptr * cdef nullable_unique_ptr[ vector[vector[ResT]] ] nested_res_ptr */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("cuda.bindings._internal.utils.get_nested_resource_ptr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_obj_i); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* #### Code section: module_exttypes ### */ static PyMethodDef __pyx_methods[] = { {0, 0, 0, 0} }; /* #### Code section: initfunc_declarations ### */ static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void); /*proto*/ static CYTHON_SMALL_CODE int 
__Pyx_InitConstants(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(__pyx_mstatetype *__pyx_mstate); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_CreateCodeObjects(__pyx_mstatetype *__pyx_mstate); /*proto*/ /* #### Code section: init_module ### */ static int __Pyx_modinit_global_init_code(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); /*--- Global init code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_variable_export_code(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); /*--- Variable export code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_function_export_code(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); /*--- Function export code ---*/ if (__Pyx_ExportFunction("is_nested_sequence", (void (*)(void))__pyx_f_4cuda_8bindings_9_internal_5utils_is_nested_sequence, "int (PyObject *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if 
(__Pyx_ExportFunction("get_buffer_pointer", (void (*)(void))__pyx_f_4cuda_8bindings_9_internal_5utils_get_buffer_pointer, "void *(PyObject *, Py_ssize_t, struct __pyx_opt_args_4cuda_8bindings_9_internal_5utils_get_buffer_pointer *__pyx_optional_args)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_0get_resource_ptr", (void (*)(void))__pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr, "int (nullable_unique_ptr > &, PyObject *, int *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_1get_resource_ptr", (void (*)(void))__pyx_fuse_1__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr, "int (nullable_unique_ptr > &, PyObject *, int32_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_2get_resource_ptr", (void (*)(void))__pyx_fuse_2__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr, "int (nullable_unique_ptr > &, PyObject *, int64_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_3get_resource_ptr", (void (*)(void))__pyx_fuse_3__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr, "int (nullable_unique_ptr > &, PyObject *, char *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_4get_resource_ptr", (void (*)(void))__pyx_fuse_4__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr, "int (nullable_unique_ptr > &, PyObject *, float *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_5get_resource_ptr", (void (*)(void))__pyx_fuse_5__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptr, "int (nullable_unique_ptr > &, PyObject *, double *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_0get_resource_ptrs", (void (*)(void))__pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_resource_ptrs, "int (nullable_unique_ptr > &, PyObject *, void *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if 
(__Pyx_ExportFunction("__pyx_fuse_0get_nested_resource_ptr", (void (*)(void))__pyx_fuse_0__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr, "int (__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, int *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_1get_nested_resource_ptr", (void (*)(void))__pyx_fuse_1__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr, "int (__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, int32_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_2get_nested_resource_ptr", (void (*)(void))__pyx_fuse_2__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr, "int (__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, int64_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_3get_nested_resource_ptr", (void (*)(void))__pyx_fuse_3__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr, "int (__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, char *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_4get_nested_resource_ptr", (void (*)(void))__pyx_fuse_4__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr, "int (__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, float *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("__pyx_fuse_5get_nested_resource_ptr", (void (*)(void))__pyx_fuse_5__pyx_f_4cuda_8bindings_9_internal_5utils_get_nested_resource_ptr, "int (__pyx_t_4cuda_8bindings_9_internal_5utils_nested_resource &, PyObject *, double *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_RefNannyFinishContext(); return -1; } static int __Pyx_modinit_type_init_code(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); 
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); /*--- Type init code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_type_import_code(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); PyObject *__pyx_t_1 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); /*--- Type import code ---*/ __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_mstate->__pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_1_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_1_5(PyTypeObject), #elif CYTHON_COMPILING_IN_LIMITED_API 0, 0, #else sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_1_5(PyHeapTypeObject), #endif __Pyx_ImportType_CheckSize_Warn_3_1_5); if (!__pyx_mstate->__pyx_ptype_7cpython_4type_type) __PYX_ERR(3, 9, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(4, 8, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_mstate->__pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType_3_1_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 sizeof(PyLongObject), __PYX_GET_STRUCT_ALIGNMENT_3_1_5(PyLongObject), #elif CYTHON_COMPILING_IN_LIMITED_API 0, 0, #else sizeof(PyLongObject), __PYX_GET_STRUCT_ALIGNMENT_3_1_5(PyLongObject), #endif __Pyx_ImportType_CheckSize_Warn_3_1_5); if (!__pyx_mstate->__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(4, 8, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 16, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); 
__pyx_mstate->__pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType_3_1_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 sizeof(PyComplexObject), __PYX_GET_STRUCT_ALIGNMENT_3_1_5(PyComplexObject), #elif CYTHON_COMPILING_IN_LIMITED_API 0, 0, #else sizeof(PyComplexObject), __PYX_GET_STRUCT_ALIGNMENT_3_1_5(PyComplexObject), #endif __Pyx_ImportType_CheckSize_Warn_3_1_5); if (!__pyx_mstate->__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(5, 16, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_RefNannyFinishContext(); return -1; } static int __Pyx_modinit_variable_import_code(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); /*--- Variable import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_function_import_code(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); /*--- Function import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } #if CYTHON_PEP489_MULTI_PHASE_INIT static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ static int __pyx_pymod_exec_utils(PyObject* module); /*proto*/ static PyModuleDef_Slot __pyx_moduledef_slots[] = { {Py_mod_create, (void*)__pyx_pymod_create}, {Py_mod_exec, (void*)__pyx_pymod_exec_utils}, #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING {Py_mod_gil, Py_MOD_GIL_NOT_USED}, #endif #if PY_VERSION_HEX >= 0x030C0000 && CYTHON_USE_MODULE_STATE {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, #endif {0, NULL} }; #endif #ifdef __cplusplus namespace { struct PyModuleDef __pyx_moduledef = #else static struct PyModuleDef __pyx_moduledef = #endif { PyModuleDef_HEAD_INIT, "utils", 
0, /* m_doc */ #if CYTHON_USE_MODULE_STATE sizeof(__pyx_mstatetype), /* m_size */ #else (CYTHON_PEP489_MULTI_PHASE_INIT) ? 0 : -1, /* m_size */ #endif __pyx_methods /* m_methods */, #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_moduledef_slots, /* m_slots */ #else NULL, /* m_reload */ #endif #if CYTHON_USE_MODULE_STATE __pyx_m_traverse, /* m_traverse */ __pyx_m_clear, /* m_clear */ NULL /* m_free */ #else NULL, /* m_traverse */ NULL, /* m_clear */ NULL /* m_free */ #endif }; #ifdef __cplusplus } /* anonymous namespace */ #endif /* PyModInitFuncType */ #ifndef CYTHON_NO_PYINIT_EXPORT #define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC #else #ifdef __cplusplus #define __Pyx_PyMODINIT_FUNC extern "C" PyObject * #else #define __Pyx_PyMODINIT_FUNC PyObject * #endif #endif __Pyx_PyMODINIT_FUNC PyInit_utils(void) CYTHON_SMALL_CODE; /*proto*/ __Pyx_PyMODINIT_FUNC PyInit_utils(void) #if CYTHON_PEP489_MULTI_PHASE_INIT { return PyModuleDef_Init(&__pyx_moduledef); } /* ModuleCreationPEP489 */ #if CYTHON_COMPILING_IN_LIMITED_API && __PYX_LIMITED_VERSION_HEX < 0x03090000 static PY_INT64_T __Pyx_GetCurrentInterpreterId(void) { { PyObject *module = PyImport_ImportModule("_interpreters"); // 3.13+ I think if (!module) { PyErr_Clear(); // just try the 3.8-3.12 version module = PyImport_ImportModule("_xxsubinterpreters"); if (!module) goto bad; } PyObject *current = PyObject_CallMethod(module, "get_current", NULL); Py_DECREF(module); if (!current) goto bad; if (PyTuple_Check(current)) { PyObject *new_current = PySequence_GetItem(current, 0); Py_DECREF(current); current = new_current; if (!new_current) goto bad; } long long as_c_int = PyLong_AsLongLong(current); Py_DECREF(current); return as_c_int; } bad: PySys_WriteStderr("__Pyx_GetCurrentInterpreterId failed. 
Try setting the C define CYTHON_PEP489_MULTI_PHASE_INIT=0\n"); return -1; } #endif #if !CYTHON_USE_MODULE_STATE static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { static PY_INT64_T main_interpreter_id = -1; #if CYTHON_COMPILING_IN_GRAAL PY_INT64_T current_id = PyInterpreterState_GetIDFromThreadState(PyThreadState_Get()); #elif CYTHON_COMPILING_IN_LIMITED_API && __PYX_LIMITED_VERSION_HEX >= 0x03090000 PY_INT64_T current_id = PyInterpreterState_GetID(PyInterpreterState_Get()); #elif CYTHON_COMPILING_IN_LIMITED_API PY_INT64_T current_id = __Pyx_GetCurrentInterpreterId(); #else PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); #endif if (unlikely(current_id == -1)) { return -1; } if (main_interpreter_id == -1) { main_interpreter_id = current_id; return 0; } else if (unlikely(main_interpreter_id != current_id)) { PyErr_SetString( PyExc_ImportError, "Interpreter change detected - this module can only be loaded into one interpreter per process."); return -1; } return 0; } #endif static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { PyObject *value = PyObject_GetAttrString(spec, from_name); int result = 0; if (likely(value)) { if (allow_none || value != Py_None) { result = PyDict_SetItemString(moddict, to_name, value); } Py_DECREF(value); } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Clear(); } else { result = -1; } return result; } static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { PyObject *module = NULL, *moddict, *modname; CYTHON_UNUSED_VAR(def); #if !CYTHON_USE_MODULE_STATE if (__Pyx_check_single_interpreter()) return NULL; #endif if (__pyx_m) return __Pyx_NewRef(__pyx_m); modname = PyObject_GetAttrString(spec, "name"); if (unlikely(!modname)) goto bad; module = PyModule_NewObject(modname); Py_DECREF(modname); if (unlikely(!module)) goto bad; moddict = 
PyModule_GetDict(module); if (unlikely(!moddict)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; return module; bad: Py_XDECREF(module); return NULL; } static CYTHON_SMALL_CODE int __pyx_pymod_exec_utils(PyObject *__pyx_pyinit_module) #endif { int stringtab_initialized = 0; #if CYTHON_USE_MODULE_STATE int pystate_addmodule_run = 0; #endif __pyx_mstatetype *__pyx_mstate = NULL; PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannyDeclarations #if CYTHON_PEP489_MULTI_PHASE_INIT if (__pyx_m) { if (__pyx_m == __pyx_pyinit_module) return 0; PyErr_SetString(PyExc_RuntimeError, "Module 'utils' has already been imported. 
Re-initialisation is not supported."); return -1; } #else if (__pyx_m) return __Pyx_NewRef(__pyx_m); #endif /*--- Module creation code ---*/ #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_t_1 = __pyx_pyinit_module; Py_INCREF(__pyx_t_1); #else __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) #endif #if CYTHON_USE_MODULE_STATE { int add_module_result = __Pyx_State_AddModule(__pyx_t_1, &__pyx_moduledef); __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "utils" pseudovariable */ if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) pystate_addmodule_run = 1; } #else __pyx_m = __pyx_t_1; #endif #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING PyUnstable_Module_SetGIL(__pyx_m, Py_MOD_GIL_NOT_USED); #endif __pyx_mstate = __pyx_mstate_global; CYTHON_UNUSED_VAR(__pyx_t_1); __pyx_mstate->__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_mstate->__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_mstate->__pyx_d); __pyx_mstate->__pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_mstate->__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_mstate->__pyx_cython_runtime = __Pyx_PyImport_AddModuleRef("cython_runtime"); if (unlikely(!__pyx_mstate->__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_mstate->__pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) /* ImportRefnannyAPI */ #if CYTHON_REFNANNY __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); if (!__Pyx_RefNanny) { PyErr_Clear(); __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); if (!__Pyx_RefNanny) Py_FatalError("failed to import 'refnanny' module"); } #endif __Pyx_RefNannySetupContext("PyInit_utils", 0); if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pxy_PyFrame_Initialize_Offsets __Pxy_PyFrame_Initialize_Offsets(); 
#endif __pyx_mstate->__pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_mstate->__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_mstate->__pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_mstate->__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_mstate->__pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_mstate->__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Initialize various global constants etc. ---*/ if (__Pyx_InitConstants(__pyx_mstate) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) stringtab_initialized = 1; if (__Pyx_InitGlobals() < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #if 0 || defined(__Pyx_CyFunction_USED) || defined(__Pyx_FusedFunction_USED) || defined(__Pyx_Coroutine_USED) || defined(__Pyx_Generator_USED) || defined(__Pyx_AsyncGen_USED) if (__pyx_CommonTypesMetaclass_init(__pyx_m) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_CyFunction_USED if (__pyx_CyFunction_init(__pyx_m) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_FusedFunction_USED if (__pyx_FusedFunction_init(__pyx_m) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Coroutine_USED if (__pyx_Coroutine_init(__pyx_m) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Generator_USED if (__pyx_Generator_init(__pyx_m) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_AsyncGen_USED if (__pyx_AsyncGen_init(__pyx_m) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ if (__pyx_module_is_main_cuda__bindings___internal__utils) { if (PyObject_SetAttr(__pyx_m, __pyx_mstate_global->__pyx_n_u_name, __pyx_mstate_global->__pyx_n_u_main) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) } { PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) if (!PyDict_GetItemString(modules, "cuda.bindings._internal.utils")) { if (unlikely((PyDict_SetItemString(modules, "cuda.bindings._internal.utils", __pyx_m) < 0))) 
__PYX_ERR(0, 1, __pyx_L1_error) } } /*--- Builtin init code ---*/ if (__Pyx_InitCachedBuiltins(__pyx_mstate) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ if (__Pyx_InitCachedConstants(__pyx_mstate) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_CreateCodeObjects(__pyx_mstate) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Global type/function init code ---*/ (void)__Pyx_modinit_global_init_code(__pyx_mstate); (void)__Pyx_modinit_variable_export_code(__pyx_mstate); if (unlikely((__Pyx_modinit_function_export_code(__pyx_mstate) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) (void)__Pyx_modinit_type_init_code(__pyx_mstate); if (unlikely((__Pyx_modinit_type_import_code(__pyx_mstate) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) (void)__Pyx_modinit_variable_import_code(__pyx_mstate); (void)__Pyx_modinit_function_import_code(__pyx_mstate); /*--- Execution code ---*/ /* "cuda/bindings/_internal/utils.pyx":127 * * * class FunctionNotFoundError(RuntimeError): pass # <<<<<<<<<<<<<< * * class NotSupportedError(RuntimeError): pass */ __pyx_t_2 = __Pyx_PEP560_update_bases(__pyx_mstate_global->__pyx_tuple[1]); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 127, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = __Pyx_CalculateMetaclass(NULL, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 127, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_Py3MetaclassPrepare(__pyx_t_3, __pyx_t_2, __pyx_mstate_global->__pyx_n_u_FunctionNotFoundError, __pyx_mstate_global->__pyx_n_u_FunctionNotFoundError, (PyObject *) NULL, __pyx_mstate_global->__pyx_n_u_cuda_bindings__internal_utils, (PyObject *) NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 127, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); if (__pyx_t_2 != __pyx_mstate_global->__pyx_tuple[1]) { if (unlikely((PyDict_SetItemString(__pyx_t_4, "__orig_bases__", __pyx_mstate_global->__pyx_tuple[1]) < 0))) __PYX_ERR(0, 127, __pyx_L1_error) } __pyx_t_5 = __Pyx_Py3ClassCreate(__pyx_t_3, 
__pyx_mstate_global->__pyx_n_u_FunctionNotFoundError, __pyx_t_2, __pyx_t_4, NULL, 0, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 127, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); if (PyDict_SetItem(__pyx_mstate_global->__pyx_d, __pyx_mstate_global->__pyx_n_u_FunctionNotFoundError, __pyx_t_5) < (0)) __PYX_ERR(0, 127, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "cuda/bindings/_internal/utils.pyx":129 * class FunctionNotFoundError(RuntimeError): pass * * class NotSupportedError(RuntimeError): pass # <<<<<<<<<<<<<< */ __pyx_t_2 = __Pyx_PEP560_update_bases(__pyx_mstate_global->__pyx_tuple[3]); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 129, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = __Pyx_CalculateMetaclass(NULL, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 129, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_Py3MetaclassPrepare(__pyx_t_3, __pyx_t_2, __pyx_mstate_global->__pyx_n_u_NotSupportedError, __pyx_mstate_global->__pyx_n_u_NotSupportedError, (PyObject *) NULL, __pyx_mstate_global->__pyx_n_u_cuda_bindings__internal_utils, (PyObject *) NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 129, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); if (__pyx_t_2 != __pyx_mstate_global->__pyx_tuple[3]) { if (unlikely((PyDict_SetItemString(__pyx_t_4, "__orig_bases__", __pyx_mstate_global->__pyx_tuple[3]) < 0))) __PYX_ERR(0, 129, __pyx_L1_error) } __pyx_t_5 = __Pyx_Py3ClassCreate(__pyx_t_3, __pyx_mstate_global->__pyx_n_u_NotSupportedError, __pyx_t_2, __pyx_t_4, NULL, 0, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 129, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); if (PyDict_SetItem(__pyx_mstate_global->__pyx_d, __pyx_mstate_global->__pyx_n_u_NotSupportedError, __pyx_t_5) < (0)) __PYX_ERR(0, 129, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; 
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "cuda/bindings/_internal/utils.pyx":1 * # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. # <<<<<<<<<<<<<< * # * # SPDX-License-Identifier: LicenseRef-NVIDIA-SOFTWARE-LICENSE */ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_mstate_global->__pyx_d, __pyx_mstate_global->__pyx_n_u_test, __pyx_t_2) < (0)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /*--- Wrapped vars code ---*/ goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); if (__pyx_m) { if (__pyx_mstate->__pyx_d && stringtab_initialized) { __Pyx_AddTraceback("init cuda.bindings._internal.utils", __pyx_clineno, __pyx_lineno, __pyx_filename); } #if !CYTHON_USE_MODULE_STATE Py_CLEAR(__pyx_m); #else Py_DECREF(__pyx_m); if (pystate_addmodule_run) { PyObject *tp, *value, *tb; PyErr_Fetch(&tp, &value, &tb); PyState_RemoveModule(&__pyx_moduledef); PyErr_Restore(tp, value, tb); } #endif } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init cuda.bindings._internal.utils"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); #if CYTHON_PEP489_MULTI_PHASE_INIT return (__pyx_m != NULL) ? 
0 : -1; #else return __pyx_m; #endif } /* #### Code section: pystring_table ### */ typedef struct { const char *s; #if 103 <= 65535 const unsigned short n; #elif 103 / 2 < INT_MAX const unsigned int n; #elif 103 / 2 < LONG_MAX const unsigned long n; #else const Py_ssize_t n; #endif #if 1 <= 31 const unsigned int encoding : 5; #elif 1 <= 255 const unsigned char encoding; #elif 1 <= 65535 const unsigned short encoding; #else const Py_ssize_t encoding; #endif const unsigned int is_unicode : 1; const unsigned int intern : 1; } __Pyx_StringTabEntry; static const char * const __pyx_string_tab_encodings[] = { 0 }; static const __Pyx_StringTabEntry __pyx_string_tab[] = { {__pyx_k_, sizeof(__pyx_k_), 0, 1, 0}, /* PyObject cname: __pyx_kp_u_ */ {__pyx_k_AssertionError, sizeof(__pyx_k_AssertionError), 0, 1, 1}, /* PyObject cname: __pyx_n_u_AssertionError */ {__pyx_k_FunctionNotFoundError, sizeof(__pyx_k_FunctionNotFoundError), 0, 1, 1}, /* PyObject cname: __pyx_n_u_FunctionNotFoundError */ {__pyx_k_None, sizeof(__pyx_k_None), 0, 1, 0}, /* PyObject cname: __pyx_kp_u_None */ {__pyx_k_NotSupportedError, sizeof(__pyx_k_NotSupportedError), 0, 1, 1}, /* PyObject cname: __pyx_n_u_NotSupportedError */ {__pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 1, 1}, /* PyObject cname: __pyx_n_u_RuntimeError */ {__pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 1, 1}, /* PyObject cname: __pyx_n_u_ValueError */ {__pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0}, /* PyObject cname: __pyx_kp_u__2 */ {__pyx_k_buf_must_be_either_a_Python_int, sizeof(__pyx_k_buf_must_be_either_a_Python_int), 0, 1, 0}, /* PyObject cname: __pyx_kp_u_buf_must_be_either_a_Python_int */ {__pyx_k_buffer_of_size_bytes, sizeof(__pyx_k_buffer_of_size_bytes), 0, 1, 0}, /* PyObject cname: __pyx_kp_u_buffer_of_size_bytes */ {__pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 1, 1}, /* PyObject cname: __pyx_n_u_cline_in_traceback */ {__pyx_k_cuda_bindings__internal_utils, 
sizeof(__pyx_k_cuda_bindings__internal_utils), 0, 1, 1}, /* PyObject cname: __pyx_n_u_cuda_bindings__internal_utils */ {__pyx_k_doc, sizeof(__pyx_k_doc), 0, 1, 1}, /* PyObject cname: __pyx_n_u_doc */ {__pyx_k_enumerate, sizeof(__pyx_k_enumerate), 0, 1, 1}, /* PyObject cname: __pyx_n_u_enumerate */ {__pyx_k_main, sizeof(__pyx_k_main), 0, 1, 1}, /* PyObject cname: __pyx_n_u_main */ {__pyx_k_metaclass, sizeof(__pyx_k_metaclass), 0, 1, 1}, /* PyObject cname: __pyx_n_u_metaclass */ {__pyx_k_module, sizeof(__pyx_k_module), 0, 1, 1}, /* PyObject cname: __pyx_n_u_module */ {__pyx_k_mro_entries, sizeof(__pyx_k_mro_entries), 0, 1, 1}, /* PyObject cname: __pyx_n_u_mro_entries */ {__pyx_k_name, sizeof(__pyx_k_name), 0, 1, 1}, /* PyObject cname: __pyx_n_u_name */ {__pyx_k_prepare, sizeof(__pyx_k_prepare), 0, 1, 1}, /* PyObject cname: __pyx_n_u_prepare */ {__pyx_k_qualname, sizeof(__pyx_k_qualname), 0, 1, 1}, /* PyObject cname: __pyx_n_u_qualname */ {__pyx_k_range, sizeof(__pyx_k_range), 0, 1, 1}, /* PyObject cname: __pyx_n_u_range */ {__pyx_k_test, sizeof(__pyx_k_test), 0, 1, 1}, /* PyObject cname: __pyx_n_u_test */ {__pyx_k_writable, sizeof(__pyx_k_writable), 0, 1, 0}, /* PyObject cname: __pyx_kp_u_writable */ {0, 0, 0, 0, 0} }; /* InitStrings.proto */ static int __Pyx_InitStrings(__Pyx_StringTabEntry const *t, PyObject **target, const char* const* encoding_names); /* #### Code section: cached_builtins ### */ static int __Pyx_InitCachedBuiltins(__pyx_mstatetype *__pyx_mstate) { CYTHON_UNUSED_VAR(__pyx_mstate); __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_mstate->__pyx_n_u_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(0, 127, __pyx_L1_error) __pyx_builtin_AssertionError = __Pyx_GetBuiltinName(__pyx_mstate->__pyx_n_u_AssertionError); if (!__pyx_builtin_AssertionError) __PYX_ERR(0, 38, __pyx_L1_error) __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_mstate->__pyx_n_u_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(0, 42, __pyx_L1_error) 
__pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_mstate->__pyx_n_u_range); if (!__pyx_builtin_range) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_mstate->__pyx_n_u_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(0, 99, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } /* #### Code section: cached_constants ### */ static int __Pyx_InitCachedConstants(__pyx_mstatetype *__pyx_mstate) { __Pyx_RefNannyDeclarations CYTHON_UNUSED_VAR(__pyx_mstate); __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); /* "cuda/bindings/_internal/utils.pyx":127 * * * class FunctionNotFoundError(RuntimeError): pass # <<<<<<<<<<<<<< * * class NotSupportedError(RuntimeError): pass */ __pyx_mstate_global->__pyx_tuple[0] = PyTuple_Pack(1, __pyx_builtin_RuntimeError); if (unlikely(!__pyx_mstate_global->__pyx_tuple[0])) __PYX_ERR(0, 127, __pyx_L1_error) __Pyx_GOTREF(__pyx_mstate_global->__pyx_tuple[0]); __Pyx_GIVEREF(__pyx_mstate_global->__pyx_tuple[0]); __pyx_mstate_global->__pyx_tuple[1] = PyTuple_Pack(1, __pyx_builtin_RuntimeError); if (unlikely(!__pyx_mstate_global->__pyx_tuple[1])) __PYX_ERR(0, 127, __pyx_L1_error) __Pyx_GOTREF(__pyx_mstate_global->__pyx_tuple[1]); __Pyx_GIVEREF(__pyx_mstate_global->__pyx_tuple[1]); /* "cuda/bindings/_internal/utils.pyx":129 * class FunctionNotFoundError(RuntimeError): pass * * class NotSupportedError(RuntimeError): pass # <<<<<<<<<<<<<< */ __pyx_mstate_global->__pyx_tuple[2] = PyTuple_Pack(1, __pyx_builtin_RuntimeError); if (unlikely(!__pyx_mstate_global->__pyx_tuple[2])) __PYX_ERR(0, 129, __pyx_L1_error) __Pyx_GOTREF(__pyx_mstate_global->__pyx_tuple[2]); __Pyx_GIVEREF(__pyx_mstate_global->__pyx_tuple[2]); __pyx_mstate_global->__pyx_tuple[3] = PyTuple_Pack(1, __pyx_builtin_RuntimeError); if (unlikely(!__pyx_mstate_global->__pyx_tuple[3])) __PYX_ERR(0, 129, __pyx_L1_error) __Pyx_GOTREF(__pyx_mstate_global->__pyx_tuple[3]); __Pyx_GIVEREF(__pyx_mstate_global->__pyx_tuple[3]); 
__Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_RefNannyFinishContext(); return -1; } /* #### Code section: init_constants ### */ static int __Pyx_InitConstants(__pyx_mstatetype *__pyx_mstate) { CYTHON_UNUSED_VAR(__pyx_mstate); if (__Pyx_InitStrings(__pyx_string_tab, __pyx_mstate->__pyx_string_tab, __pyx_string_tab_encodings) < 0) __PYX_ERR(0, 1, __pyx_L1_error); return 0; __pyx_L1_error:; return -1; } /* #### Code section: init_codeobjects ### */ static int __Pyx_CreateCodeObjects(__pyx_mstatetype *__pyx_mstate) { CYTHON_UNUSED_VAR(__pyx_mstate); return 0; } /* #### Code section: init_globals ### */ static int __Pyx_InitGlobals(void) { /* PythonCompatibility.init */ if (likely(__Pyx_init_co_variables() == 0)); else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L1_error) /* AssertionsEnabled.init */ if (likely(__Pyx_init_assertions_enabled() == 0)); else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } /* #### Code section: cleanup_globals ### */ /* #### Code section: cleanup_module ### */ /* #### Code section: main_method ### */ /* #### Code section: utility_code_pragmas ### */ #ifdef _MSC_VER #pragma warning( push ) /* Warning 4127: conditional expression is constant * Cython uses constant conditional expressions to allow in inline functions to be optimized at * compile-time, so this warning is not useful */ #pragma warning( disable : 4127 ) #endif /* #### Code section: utility_code_def ### */ /* --- Runtime support code --- */ /* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; m = PyImport_ImportModule(modname); if (!m) goto end; p = PyObject_GetAttrString(m, "RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: Py_XDECREF(p); Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } #endif /* PyErrExceptionMatches */ #if CYTHON_FAST_THREAD_STATE static int 
__Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(tuple); for (i=0; i= 0x030C00A6 PyObject *current_exception = tstate->current_exception; if (unlikely(!current_exception)) return 0; exc_type = (PyObject*) Py_TYPE(current_exception); if (exc_type == err) return 1; #else exc_type = tstate->curexc_type; if (exc_type == err) return 1; if (unlikely(!exc_type)) return 0; #endif #if CYTHON_AVOID_BORROWED_REFS Py_INCREF(exc_type); #endif if (unlikely(PyTuple_Check(err))) { result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); } else { result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); } #if CYTHON_AVOID_BORROWED_REFS Py_DECREF(exc_type); #endif return result; } #endif /* PyErrFetchRestore */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { #if PY_VERSION_HEX >= 0x030C00A6 PyObject *tmp_value; assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); if (value) { #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) #endif PyException_SetTraceback(value, tb); } tmp_value = tstate->current_exception; tstate->current_exception = value; Py_XDECREF(tmp_value); Py_XDECREF(type); Py_XDECREF(tb); #else PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; tmp_value = tstate->curexc_value; tmp_tb = tstate->curexc_traceback; tstate->curexc_type = type; tstate->curexc_value = value; tstate->curexc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); #endif } static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #if PY_VERSION_HEX >= 0x030C00A6 PyObject* exc_value; exc_value = tstate->current_exception; tstate->current_exception = 0; *value = exc_value; *type = NULL; *tb = NULL; if (exc_value) { *type = (PyObject*) Py_TYPE(exc_value); 
Py_INCREF(*type); #if CYTHON_COMPILING_IN_CPYTHON *tb = ((PyBaseExceptionObject*) exc_value)->traceback; Py_XINCREF(*tb); #else *tb = PyException_GetTraceback(exc_value); #endif } #else *type = tstate->curexc_type; *value = tstate->curexc_value; *tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; #endif } #endif /* PyObjectGetAttrStr */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro)) return tp->tp_getattro(obj, attr_name); return PyObject_GetAttr(obj, attr_name); } #endif /* PyObjectGetAttrStrNoError */ #if __PYX_LIMITED_VERSION_HEX < 0x030d0000 static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) __Pyx_PyErr_Clear(); } #endif static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { PyObject *result; #if __PYX_LIMITED_VERSION_HEX >= 0x030d0000 (void) PyObject_GetOptionalAttr(obj, attr_name, &result); return result; #else #if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); } #endif result = __Pyx_PyObject_GetAttrStr(obj, attr_name); if (unlikely(!result)) { __Pyx_PyObject_GetAttrStr_ClearAttributeError(); } return result; #endif } /* GetBuiltinName */ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_mstate_global->__pyx_b, name); if (unlikely(!result) && !PyErr_Occurred()) { PyErr_Format(PyExc_NameError, "name '%U' is not defined", name); } return result; } /* RaiseException */ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { PyObject* 
owned_instance = NULL; if (tb == Py_None) { tb = 0; } else if (tb && !PyTraceBack_Check(tb)) { PyErr_SetString(PyExc_TypeError, "raise: arg 3 must be a traceback or None"); goto bad; } if (value == Py_None) value = 0; if (PyExceptionInstance_Check(type)) { if (value) { PyErr_SetString(PyExc_TypeError, "instance exception may not have a separate value"); goto bad; } value = type; type = (PyObject*) Py_TYPE(value); } else if (PyExceptionClass_Check(type)) { PyObject *instance_class = NULL; if (value && PyExceptionInstance_Check(value)) { instance_class = (PyObject*) Py_TYPE(value); if (instance_class != type) { int is_subclass = PyObject_IsSubclass(instance_class, type); if (!is_subclass) { instance_class = NULL; } else if (unlikely(is_subclass == -1)) { goto bad; } else { type = instance_class; } } } if (!instance_class) { PyObject *args; if (!value) args = PyTuple_New(0); else if (PyTuple_Check(value)) { Py_INCREF(value); args = value; } else args = PyTuple_Pack(1, value); if (!args) goto bad; owned_instance = PyObject_Call(type, args, NULL); Py_DECREF(args); if (!owned_instance) goto bad; value = owned_instance; if (!PyExceptionInstance_Check(value)) { PyErr_Format(PyExc_TypeError, "calling %R should have returned an instance of " "BaseException, not %R", type, Py_TYPE(value)); goto bad; } } } else { PyErr_SetString(PyExc_TypeError, "raise: exception class must be a subclass of BaseException"); goto bad; } if (cause) { PyObject *fixed_cause; if (cause == Py_None) { fixed_cause = NULL; } else if (PyExceptionClass_Check(cause)) { fixed_cause = PyObject_CallObject(cause, NULL); if (fixed_cause == NULL) goto bad; } else if (PyExceptionInstance_Check(cause)) { fixed_cause = cause; Py_INCREF(fixed_cause); } else { PyErr_SetString(PyExc_TypeError, "exception causes must derive from " "BaseException"); goto bad; } PyException_SetCause(value, fixed_cause); } PyErr_SetObject(type, value); if (tb) { #if PY_VERSION_HEX >= 0x030C00A6 PyException_SetTraceback(value, tb); #elif 
CYTHON_FAST_THREAD_STATE PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject* tmp_tb = tstate->curexc_traceback; if (tb != tmp_tb) { Py_INCREF(tb); tstate->curexc_traceback = tb; Py_XDECREF(tmp_tb); } #else PyObject *tmp_type, *tmp_value, *tmp_tb; PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); Py_INCREF(tb); PyErr_Restore(tmp_type, tmp_value, tb); Py_XDECREF(tmp_tb); #endif } bad: Py_XDECREF(owned_instance); return; } /* GetTopmostException */ #if CYTHON_USE_EXC_INFO_STACK && CYTHON_FAST_THREAD_STATE static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate) { _PyErr_StackItem *exc_info = tstate->exc_info; while ((exc_info->exc_value == NULL || exc_info->exc_value == Py_None) && exc_info->previous_item != NULL) { exc_info = exc_info->previous_item; } return exc_info; } #endif /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); PyObject *exc_value = exc_info->exc_value; if (exc_value == NULL || exc_value == Py_None) { *value = NULL; *type = NULL; *tb = NULL; } else { *value = exc_value; Py_INCREF(*value); *type = (PyObject*) Py_TYPE(exc_value); Py_INCREF(*type); *tb = PyException_GetTraceback(exc_value); } #elif CYTHON_USE_EXC_INFO_STACK _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); *type = exc_info->exc_type; *value = exc_info->exc_value; *tb = exc_info->exc_traceback; Py_XINCREF(*type); Py_XINCREF(*value); Py_XINCREF(*tb); #else *type = tstate->exc_type; *value = tstate->exc_value; *tb = tstate->exc_traceback; Py_XINCREF(*type); Py_XINCREF(*value); Py_XINCREF(*tb); #endif } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 
_PyErr_StackItem *exc_info = tstate->exc_info; PyObject *tmp_value = exc_info->exc_value; exc_info->exc_value = value; Py_XDECREF(tmp_value); Py_XDECREF(type); Py_XDECREF(tb); #else PyObject *tmp_type, *tmp_value, *tmp_tb; #if CYTHON_USE_EXC_INFO_STACK _PyErr_StackItem *exc_info = tstate->exc_info; tmp_type = exc_info->exc_type; tmp_value = exc_info->exc_value; tmp_tb = exc_info->exc_traceback; exc_info->exc_type = type; exc_info->exc_value = value; exc_info->exc_traceback = tb; #else tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = type; tstate->exc_value = value; tstate->exc_traceback = tb; #endif Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); #endif } #endif /* GetException */ #if CYTHON_FAST_THREAD_STATE static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) #else static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) #endif { PyObject *local_type = NULL, *local_value, *local_tb = NULL; #if CYTHON_FAST_THREAD_STATE PyObject *tmp_type, *tmp_value, *tmp_tb; #if PY_VERSION_HEX >= 0x030C0000 local_value = tstate->current_exception; tstate->current_exception = 0; #else local_type = tstate->curexc_type; local_value = tstate->curexc_value; local_tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; #endif #elif __PYX_LIMITED_VERSION_HEX > 0x030C0000 local_value = PyErr_GetRaisedException(); #else PyErr_Fetch(&local_type, &local_value, &local_tb); #endif #if __PYX_LIMITED_VERSION_HEX > 0x030C0000 if (likely(local_value)) { local_type = (PyObject*) Py_TYPE(local_value); Py_INCREF(local_type); local_tb = PyException_GetTraceback(local_value); } #else PyErr_NormalizeException(&local_type, &local_value, &local_tb); #if CYTHON_FAST_THREAD_STATE if (unlikely(tstate->curexc_type)) #else if (unlikely(PyErr_Occurred())) #endif goto bad; if (local_tb) { if 
(unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) goto bad; } #endif // __PYX_LIMITED_VERSION_HEX > 0x030C0000 Py_XINCREF(local_tb); Py_XINCREF(local_type); Py_XINCREF(local_value); *type = local_type; *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE #if CYTHON_USE_EXC_INFO_STACK { _PyErr_StackItem *exc_info = tstate->exc_info; #if PY_VERSION_HEX >= 0x030B00a4 tmp_value = exc_info->exc_value; exc_info->exc_value = local_value; tmp_type = NULL; tmp_tb = NULL; Py_XDECREF(local_type); Py_XDECREF(local_tb); #else tmp_type = exc_info->exc_type; tmp_value = exc_info->exc_value; tmp_tb = exc_info->exc_traceback; exc_info->exc_type = local_type; exc_info->exc_value = local_value; exc_info->exc_traceback = local_tb; #endif } #else tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = local_type; tstate->exc_value = local_value; tstate->exc_traceback = local_tb; #endif Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); #elif __PYX_LIMITED_VERSION_HEX >= 0x030b0000 PyErr_SetHandledException(local_value); Py_XDECREF(local_value); Py_XDECREF(local_type); Py_XDECREF(local_tb); #else PyErr_SetExcInfo(local_type, local_value, local_tb); #endif return 0; #if __PYX_LIMITED_VERSION_HEX <= 0x030C0000 bad: *type = 0; *value = 0; *tb = 0; Py_XDECREF(local_type); Py_XDECREF(local_value); Py_XDECREF(local_tb); return -1; #endif } /* PyUnicode_Unicode */ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj) { if (unlikely(obj == Py_None)) obj = __pyx_mstate_global->__pyx_kp_u_None; return __Pyx_NewRef(obj); } /* JoinPyUnicode */ static PyObject* __Pyx_PyUnicode_Join(PyObject** values, Py_ssize_t value_count, Py_ssize_t result_ulength, Py_UCS4 max_char) { #if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS PyObject *result_uval; int result_ukind, kind_shift; Py_ssize_t i, char_pos; void *result_udata; if (max_char > 1114111) max_char 
= 1114111; result_uval = PyUnicode_New(result_ulength, max_char); if (unlikely(!result_uval)) return NULL; result_ukind = (max_char <= 255) ? PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; kind_shift = (result_ukind == PyUnicode_4BYTE_KIND) ? 2 : result_ukind - 1; result_udata = PyUnicode_DATA(result_uval); assert(kind_shift == 2 || kind_shift == 1 || kind_shift == 0); if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - result_ulength < 0)) goto overflow; char_pos = 0; for (i=0; i < value_count; i++) { int ukind; Py_ssize_t ulength; void *udata; PyObject *uval = values[i]; #if !CYTHON_COMPILING_IN_LIMITED_API if (__Pyx_PyUnicode_READY(uval) == (-1)) goto bad; #endif ulength = __Pyx_PyUnicode_GET_LENGTH(uval); #if !CYTHON_ASSUME_SAFE_SIZE if (unlikely(ulength < 0)) goto bad; #endif if (unlikely(!ulength)) continue; if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - ulength < char_pos)) goto overflow; ukind = __Pyx_PyUnicode_KIND(uval); udata = __Pyx_PyUnicode_DATA(uval); if (ukind == result_ukind) { memcpy((char *)result_udata + (char_pos << kind_shift), udata, (size_t) (ulength << kind_shift)); } else { #if PY_VERSION_HEX >= 0x030d0000 if (unlikely(PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength) < 0)) goto bad; #elif CYTHON_COMPILING_IN_CPYTHON || defined(_PyUnicode_FastCopyCharacters) _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); #else Py_ssize_t j; for (j=0; j < ulength; j++) { Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); } #endif } char_pos += ulength; } return result_uval; overflow: PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); bad: Py_DECREF(result_uval); return NULL; #else Py_ssize_t i; PyObject *result = NULL; PyObject *value_tuple = PyTuple_New(value_count); if (unlikely(!value_tuple)) return NULL; CYTHON_UNUSED_VAR(max_char); CYTHON_UNUSED_VAR(result_ulength); 
for (i=0; i__pyx_empty_unicode, value_tuple); bad: Py_DECREF(value_tuple); return result; #endif } /* PyFunctionFastCall */ #if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject *const *args, Py_ssize_t na, PyObject *globals) { PyFrameObject *f; PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject **fastlocals; Py_ssize_t i; PyObject *result; assert(globals != NULL); /* XXX Perhaps we should create a specialized PyFrame_New() that doesn't take locals, but does take builtins without sanity checking them. */ assert(tstate != NULL); f = PyFrame_New(tstate, co, globals, NULL); if (f == NULL) { return NULL; } fastlocals = __Pyx_PyFrame_GetLocalsplus(f); for (i = 0; i < na; i++) { Py_INCREF(*args); fastlocals[i] = *args++; } result = PyEval_EvalFrameEx(f,0); ++tstate->recursion_depth; Py_DECREF(f); --tstate->recursion_depth; return result; } static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject *const *args, Py_ssize_t nargs, PyObject *kwargs) { PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); PyObject *globals = PyFunction_GET_GLOBALS(func); PyObject *argdefs = PyFunction_GET_DEFAULTS(func); PyObject *closure; PyObject *kwdefs; PyObject *kwtuple, **k; PyObject **d; Py_ssize_t nd; Py_ssize_t nk; PyObject *result; assert(kwargs == NULL || PyDict_Check(kwargs)); nk = kwargs ? 
PyDict_Size(kwargs) : 0; if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { return NULL; } if ( co->co_kwonlyargcount == 0 && likely(kwargs == NULL || nk == 0) && co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { if (argdefs == NULL && co->co_argcount == nargs) { result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); goto done; } else if (nargs == 0 && argdefs != NULL && co->co_argcount == Py_SIZE(argdefs)) { /* function called with no arguments, but all parameters have a default value: use default values as arguments .*/ args = &PyTuple_GET_ITEM(argdefs, 0); result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); goto done; } } if (kwargs != NULL) { Py_ssize_t pos, i; kwtuple = PyTuple_New(2 * nk); if (kwtuple == NULL) { result = NULL; goto done; } k = &PyTuple_GET_ITEM(kwtuple, 0); pos = i = 0; while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { Py_INCREF(k[i]); Py_INCREF(k[i+1]); i += 2; } nk = i / 2; } else { kwtuple = NULL; k = NULL; } closure = PyFunction_GET_CLOSURE(func); kwdefs = PyFunction_GET_KW_DEFAULTS(func); if (argdefs != NULL) { d = &PyTuple_GET_ITEM(argdefs, 0); nd = Py_SIZE(argdefs); } else { d = NULL; nd = 0; } result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, args, (int)nargs, k, (int)nk, d, (int)nd, kwdefs, closure); Py_XDECREF(kwtuple); done: Py_LeaveRecursiveCall(); return result; } #endif /* PyObjectCall */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *result; ternaryfunc call = Py_TYPE(func)->tp_call; if (unlikely(!call)) return PyObject_Call(func, arg, kw); if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) return NULL; result = (*call)(func, arg, kw); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* 
PyObjectCallMethO */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { PyObject *self, *result; PyCFunction cfunc; cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); self = __Pyx_CyOrPyCFunction_GET_SELF(func); if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) return NULL; result = cfunc(self, arg); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* PyObjectFastCall */ #if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject * const*args, size_t nargs, PyObject *kwargs) { PyObject *argstuple; PyObject *result = 0; size_t i; argstuple = PyTuple_New((Py_ssize_t)nargs); if (unlikely(!argstuple)) return NULL; for (i = 0; i < nargs; i++) { Py_INCREF(args[i]); if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) != (0)) goto bad; } result = __Pyx_PyObject_Call(func, argstuple, kwargs); bad: Py_DECREF(argstuple); return result; } #endif #if CYTHON_VECTORCALL && !CYTHON_COMPILING_IN_LIMITED_API #if PY_VERSION_HEX < 0x03090000 #define __Pyx_PyVectorcall_Function(callable) _PyVectorcall_Function(callable) #elif CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE vectorcallfunc __Pyx_PyVectorcall_Function(PyObject *callable) { PyTypeObject *tp = Py_TYPE(callable); #if defined(__Pyx_CyFunction_USED) if (__Pyx_CyFunction_CheckExact(callable)) { return __Pyx_CyFunction_func_vectorcall(callable); } #endif if (!PyType_HasFeature(tp, Py_TPFLAGS_HAVE_VECTORCALL)) { return NULL; } assert(PyCallable_Check(callable)); Py_ssize_t offset = tp->tp_vectorcall_offset; assert(offset > 0); vectorcallfunc ptr; memcpy(&ptr, (char *) callable + offset, sizeof(ptr)); return ptr; } #else #define __Pyx_PyVectorcall_Function(callable) PyVectorcall_Function(callable) #endif #endif 
static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject *const *args, size_t _nargs, PyObject *kwargs) { Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); #if CYTHON_COMPILING_IN_CPYTHON if (nargs == 0 && kwargs == NULL) { if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) return __Pyx_PyObject_CallMethO(func, NULL); } else if (nargs == 1 && kwargs == NULL) { if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) return __Pyx_PyObject_CallMethO(func, args[0]); } #endif #if PY_VERSION_HEX < 0x030800B1 #if CYTHON_FAST_PYCCALL if (PyCFunction_Check(func)) { if (kwargs) { return _PyCFunction_FastCallDict(func, args, nargs, kwargs); } else { return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); } } if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); } #endif #if CYTHON_FAST_PYCALL if (PyFunction_Check(func)) { return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); } #endif #endif if (kwargs == NULL) { #if CYTHON_VECTORCALL && !CYTHON_COMPILING_IN_LIMITED_API vectorcallfunc f = __Pyx_PyVectorcall_Function(func); if (f) { return f(func, args, _nargs, NULL); } #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL if (__Pyx_CyFunction_CheckExact(func)) { __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); if (f) return f(func, args, _nargs, NULL); } #elif CYTHON_COMPILING_IN_LIMITED_API && CYTHON_VECTORCALL return PyObject_Vectorcall(func, args, _nargs, NULL); #endif } if (nargs == 0) { return __Pyx_PyObject_Call(func, __pyx_mstate_global->__pyx_empty_tuple, kwargs); } #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); #else return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); #endif } /* SwapException */ #if 
CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 _PyErr_StackItem *exc_info = tstate->exc_info; tmp_value = exc_info->exc_value; exc_info->exc_value = *value; if (tmp_value == NULL || tmp_value == Py_None) { Py_XDECREF(tmp_value); tmp_value = NULL; tmp_type = NULL; tmp_tb = NULL; } else { tmp_type = (PyObject*) Py_TYPE(tmp_value); Py_INCREF(tmp_type); #if CYTHON_COMPILING_IN_CPYTHON tmp_tb = ((PyBaseExceptionObject*) tmp_value)->traceback; Py_XINCREF(tmp_tb); #else tmp_tb = PyException_GetTraceback(tmp_value); #endif } #elif CYTHON_USE_EXC_INFO_STACK _PyErr_StackItem *exc_info = tstate->exc_info; tmp_type = exc_info->exc_type; tmp_value = exc_info->exc_value; tmp_tb = exc_info->exc_traceback; exc_info->exc_type = *type; exc_info->exc_value = *value; exc_info->exc_traceback = *tb; #else tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = *type; tstate->exc_value = *value; tstate->exc_traceback = *tb; #endif *type = tmp_type; *value = tmp_value; *tb = tmp_tb; } #else static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); PyErr_SetExcInfo(*type, *value, *tb); *type = tmp_type; *value = tmp_value; *tb = tmp_tb; } #endif /* GetItemInt */ static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { PyObject *r; if (unlikely(!j)) return NULL; r = PyObject_GetItem(o, j); Py_DECREF(j); return r; } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && CYTHON_ASSUME_SAFE_SIZE && !CYTHON_AVOID_BORROWED_REFS && 
!CYTHON_AVOID_THREAD_UNSAFE_BORROWED_REFS Py_ssize_t wrapped_i = i; if (wraparound & unlikely(i < 0)) { wrapped_i += PyList_GET_SIZE(o); } if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { PyObject *r = PyList_GET_ITEM(o, wrapped_i); Py_INCREF(r); return r; } return __Pyx_GetItemInt_Generic(o, PyLong_FromSsize_t(i)); #else return PySequence_GetItem(o, i); #endif } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && CYTHON_ASSUME_SAFE_SIZE && !CYTHON_AVOID_BORROWED_REFS Py_ssize_t wrapped_i = i; if (wraparound & unlikely(i < 0)) { wrapped_i += PyTuple_GET_SIZE(o); } if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); Py_INCREF(r); return r; } return __Pyx_GetItemInt_Generic(o, PyLong_FromSsize_t(i)); #else return PySequence_GetItem(o, i); #endif } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && CYTHON_ASSUME_SAFE_SIZE && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS if (is_list || PyList_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { return __Pyx_PyList_GetItemRef(o, n); } } else if (PyTuple_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { PyObject *r = PyTuple_GET_ITEM(o, n); Py_INCREF(r); return r; } } else { PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; if (mm && mm->mp_subscript) { PyObject *r, *key = PyLong_FromSsize_t(i); if (unlikely(!key)) return NULL; r = mm->mp_subscript(o, key); Py_DECREF(key); return r; } if (likely(sm && sm->sq_item)) { if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { Py_ssize_t l = sm->sq_length(o); if (likely(l >= 0)) { i += l; } else { if (!PyErr_ExceptionMatches(PyExc_OverflowError)) return NULL; PyErr_Clear(); } } return sm->sq_item(o, i); } } #else if (is_list || !PyMapping_Check(o)) { return PySequence_GetItem(o, i); } #endif return __Pyx_GetItemInt_Generic(o, PyLong_FromSsize_t(i)); } /* RaiseUnexpectedTypeError */ static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) { __Pyx_TypeName obj_type_name = __Pyx_PyType_GetFullyQualifiedName(Py_TYPE(obj)); PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, expected, obj_type_name); __Pyx_DECREF_TypeName(obj_type_name); return 0; } /* TypeImport */ #ifndef __PYX_HAVE_RT_ImportType_3_1_5 #define __PYX_HAVE_RT_ImportType_3_1_5 static PyTypeObject *__Pyx_ImportType_3_1_5(PyObject *module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_1_5 check_size) { PyObject *result = 0; Py_ssize_t basicsize; Py_ssize_t itemsize; #if defined(Py_LIMITED_API) || (defined(CYTHON_COMPILING_IN_LIMITED_API) && CYTHON_COMPILING_IN_LIMITED_API) PyObject *py_basicsize; PyObject *py_itemsize; #endif result = PyObject_GetAttrString(module, class_name); if (!result) goto bad; if (!PyType_Check(result)) { PyErr_Format(PyExc_TypeError, "%.200s.%.200s is not a type object", module_name, class_name); goto bad; } #if !( defined(Py_LIMITED_API) || 
(defined(CYTHON_COMPILING_IN_LIMITED_API) && CYTHON_COMPILING_IN_LIMITED_API) ) basicsize = ((PyTypeObject *)result)->tp_basicsize; itemsize = ((PyTypeObject *)result)->tp_itemsize; #else if (size == 0) { return (PyTypeObject *)result; } py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); if (!py_basicsize) goto bad; basicsize = PyLong_AsSsize_t(py_basicsize); Py_DECREF(py_basicsize); py_basicsize = 0; if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; py_itemsize = PyObject_GetAttrString(result, "__itemsize__"); if (!py_itemsize) goto bad; itemsize = PyLong_AsSsize_t(py_itemsize); Py_DECREF(py_itemsize); py_itemsize = 0; if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; #endif if (itemsize) { if (size % alignment) { alignment = size % alignment; } if (itemsize < (Py_ssize_t)alignment) itemsize = (Py_ssize_t)alignment; } if ((size_t)(basicsize + itemsize) < size) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " "Expected %zd from C header, got %zd from PyObject", module_name, class_name, size, basicsize+itemsize); goto bad; } if (check_size == __Pyx_ImportType_CheckSize_Error_3_1_5 && ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " "Expected %zd from C header, got %zd-%zd from PyObject", module_name, class_name, size, basicsize, basicsize+itemsize); goto bad; } else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_1_5 && (size_t)basicsize > size) { if (PyErr_WarnFormat(NULL, 0, "%.200s.%.200s size changed, may indicate binary incompatibility. 
" "Expected %zd from C header, got %zd from PyObject", module_name, class_name, size, basicsize) < 0) { goto bad; } } return (PyTypeObject *)result; bad: Py_XDECREF(result); return NULL; } #endif /* PyObjectCallOneArg */ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *args[2] = {NULL, arg}; return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); } /* Py3UpdateBases */ static PyObject* __Pyx_PEP560_update_bases(PyObject *bases) { Py_ssize_t i, j, size_bases; PyObject *base = NULL, *meth, *new_base, *result, *new_bases = NULL; #if CYTHON_ASSUME_SAFE_SIZE size_bases = PyTuple_GET_SIZE(bases); #else size_bases = PyTuple_Size(bases); if (size_bases < 0) return NULL; #endif for (i = 0; i < size_bases; i++) { #if CYTHON_AVOID_BORROWED_REFS Py_CLEAR(base); #endif #if CYTHON_ASSUME_SAFE_MACROS base = PyTuple_GET_ITEM(bases, i); #else base = PyTuple_GetItem(bases, i); if (!base) goto error; #endif #if CYTHON_AVOID_BORROWED_REFS Py_INCREF(base); #endif if (PyType_Check(base)) { if (new_bases) { if (PyList_Append(new_bases, base) < 0) { goto error; } } continue; } meth = __Pyx_PyObject_GetAttrStrNoError(base, __pyx_mstate_global->__pyx_n_u_mro_entries); if (!meth && PyErr_Occurred()) { goto error; } if (!meth) { if (new_bases) { if (PyList_Append(new_bases, base) < 0) { goto error; } } continue; } new_base = __Pyx_PyObject_CallOneArg(meth, bases); Py_DECREF(meth); if (!new_base) { goto error; } if (!PyTuple_Check(new_base)) { PyErr_SetString(PyExc_TypeError, "__mro_entries__ must return a tuple"); Py_DECREF(new_base); goto error; } if (!new_bases) { if (!(new_bases = PyList_New(i))) { goto error; } for (j = 0; j < i; j++) { PyObject *base_from_list; #if CYTHON_ASSUME_SAFE_MACROS base_from_list = PyTuple_GET_ITEM(bases, j); PyList_SET_ITEM(new_bases, j, base_from_list); Py_INCREF(base_from_list); #else base_from_list = PyTuple_GetItem(bases, j); if (!base_from_list) goto error; 
Py_INCREF(base_from_list); if (PyList_SetItem(new_bases, j, base_from_list) < 0) goto error; #endif } } #if CYTHON_ASSUME_SAFE_SIZE j = PyList_GET_SIZE(new_bases); #else j = PyList_Size(new_bases); if (j < 0) goto error; #endif if (PyList_SetSlice(new_bases, j, j, new_base) < 0) { goto error; } Py_DECREF(new_base); } if (!new_bases) { Py_INCREF(bases); return bases; } result = PyList_AsTuple(new_bases); Py_DECREF(new_bases); #if CYTHON_AVOID_BORROWED_REFS Py_XDECREF(base); #endif return result; error: Py_XDECREF(new_bases); #if CYTHON_AVOID_BORROWED_REFS Py_XDECREF(base); #endif return NULL; } /* CalculateMetaclass */ static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases) { Py_ssize_t i, nbases; #if CYTHON_ASSUME_SAFE_SIZE nbases = PyTuple_GET_SIZE(bases); #else nbases = PyTuple_Size(bases); if (nbases < 0) return NULL; #endif for (i=0; i < nbases; i++) { PyTypeObject *tmptype; #if CYTHON_ASSUME_SAFE_MACROS PyObject *tmp = PyTuple_GET_ITEM(bases, i); #else PyObject *tmp = PyTuple_GetItem(bases, i); if (!tmp) return NULL; #endif tmptype = Py_TYPE(tmp); if (!metaclass) { metaclass = tmptype; continue; } if (PyType_IsSubtype(metaclass, tmptype)) continue; if (PyType_IsSubtype(tmptype, metaclass)) { metaclass = tmptype; continue; } PyErr_SetString(PyExc_TypeError, "metaclass conflict: " "the metaclass of a derived class " "must be a (non-strict) subclass " "of the metaclasses of all its bases"); return NULL; } if (!metaclass) { metaclass = &PyType_Type; } Py_INCREF((PyObject*) metaclass); return (PyObject*) metaclass; } /* PyObjectCall2Args */ static CYTHON_INLINE PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { PyObject *args[3] = {NULL, arg1, arg2}; return __Pyx_PyObject_FastCall(function, args+1, 2 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); } /* PyObjectLookupSpecial */ #if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx__PyObject_LookupSpecial(PyObject* obj, 
PyObject* attr_name, int with_error) { PyObject *res; PyTypeObject *tp = Py_TYPE(obj); res = _PyType_Lookup(tp, attr_name); if (likely(res)) { descrgetfunc f = Py_TYPE(res)->tp_descr_get; if (!f) { Py_INCREF(res); } else { res = f(res, obj, (PyObject *)tp); } } else if (with_error) { PyErr_SetObject(PyExc_AttributeError, attr_name); } return res; } #endif /* Py3ClassCreate */ static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc) { PyObject *ns; if (metaclass) { PyObject *prep = __Pyx_PyObject_GetAttrStrNoError(metaclass, __pyx_mstate_global->__pyx_n_u_prepare); if (prep) { PyObject *pargs[3] = {NULL, name, bases}; ns = __Pyx_PyObject_FastCallDict(prep, pargs+1, 2 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET, mkw); Py_DECREF(prep); } else { if (unlikely(PyErr_Occurred())) return NULL; ns = PyDict_New(); } } else { ns = PyDict_New(); } if (unlikely(!ns)) return NULL; if (unlikely(PyObject_SetItem(ns, __pyx_mstate_global->__pyx_n_u_module, modname) < 0)) goto bad; if (unlikely(PyObject_SetItem(ns, __pyx_mstate_global->__pyx_n_u_qualname, qualname) < 0)) goto bad; if (unlikely(doc && PyObject_SetItem(ns, __pyx_mstate_global->__pyx_n_u_doc, doc) < 0)) goto bad; return ns; bad: Py_DECREF(ns); return NULL; } static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass) { PyObject *result; PyObject *owned_metaclass = NULL; PyObject *margs[4] = {NULL, name, bases, dict}; if (allow_py2_metaclass) { owned_metaclass = PyObject_GetItem(dict, __pyx_mstate_global->__pyx_n_u_metaclass); if (owned_metaclass) { metaclass = owned_metaclass; } else if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) { PyErr_Clear(); } else { return NULL; } } if (calculate_metaclass && (!metaclass || PyType_Check(metaclass))) { metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) 
metaclass, bases); Py_XDECREF(owned_metaclass); if (unlikely(!metaclass)) return NULL; owned_metaclass = metaclass; } result = __Pyx_PyObject_FastCallDict(metaclass, margs+1, 3 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET, mkw); Py_XDECREF(owned_metaclass); return result; } /* PyDictVersioning */ #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { PyObject *dict = Py_TYPE(obj)->tp_dict; return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; } static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { PyObject **dictptr = NULL; Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; if (offset) { #if CYTHON_COMPILING_IN_CPYTHON dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); #else dictptr = _PyObject_GetDictPtr(obj); #endif } return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; } static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { PyObject *dict = Py_TYPE(obj)->tp_dict; if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) return 0; return obj_dict_version == __Pyx_get_object_dict_version(obj); } #endif /* CLineInTraceback */ #if CYTHON_CLINE_IN_TRACEBACK && CYTHON_CLINE_IN_TRACEBACK_RUNTIME static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif CYTHON_MAYBE_UNUSED_VAR(tstate); if (unlikely(!__pyx_mstate_global->__pyx_cython_runtime)) { return c_line; } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_mstate_global->__pyx_cython_runtime); if (likely(cython_runtime_dict)) { __Pyx_BEGIN_CRITICAL_SECTION(*cython_runtime_dict); __PYX_PY_DICT_LOOKUP_IF_MODIFIED( use_cline, 
*cython_runtime_dict, __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_mstate_global->__pyx_n_u_cline_in_traceback)) Py_XINCREF(use_cline); __Pyx_END_CRITICAL_SECTION(); } else #endif { PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_mstate_global->__pyx_cython_runtime, __pyx_mstate_global->__pyx_n_u_cline_in_traceback); if (use_cline_obj) { use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; Py_INCREF(use_cline); Py_DECREF(use_cline_obj); } else { PyErr_Clear(); use_cline = NULL; } } if (!use_cline) { c_line = 0; (void) PyObject_SetAttr(__pyx_mstate_global->__pyx_cython_runtime, __pyx_mstate_global->__pyx_n_u_cline_in_traceback, Py_False); } else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { c_line = 0; } Py_XDECREF(use_cline); __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); return c_line; } #endif /* CodeObjectCache */ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { int start = 0, mid = 0, end = count - 1; if (end >= 0 && code_line > entries[end].code_line) { return count; } while (start < end) { mid = start + (end - start) / 2; if (code_line < entries[mid].code_line) { end = mid; } else if (code_line > entries[mid].code_line) { start = mid + 1; } else { return mid; } } if (code_line <= entries[mid].code_line) { return mid; } else { return mid + 1; } } static __Pyx_CachedCodeObjectType *__pyx__find_code_object(struct __Pyx_CodeObjectCache *code_cache, int code_line) { __Pyx_CachedCodeObjectType* code_object; int pos; if (unlikely(!code_line) || unlikely(!code_cache->entries)) { return NULL; } pos = __pyx_bisect_code_objects(code_cache->entries, code_cache->count, code_line); if (unlikely(pos >= code_cache->count) || unlikely(code_cache->entries[pos].code_line != code_line)) { return NULL; } code_object = code_cache->entries[pos].code_object; Py_INCREF(code_object); return code_object; } static __Pyx_CachedCodeObjectType 
*__pyx_find_code_object(int code_line) { #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING && !CYTHON_ATOMICS (void)__pyx__find_code_object; return NULL; // Most implementation should have atomics. But otherwise, don't make it thread-safe, just miss. #else struct __Pyx_CodeObjectCache *code_cache = &__pyx_mstate_global->__pyx_code_cache; #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING __pyx_nonatomic_int_type old_count = __pyx_atomic_incr_acq_rel(&code_cache->accessor_count); if (old_count < 0) { __pyx_atomic_decr_acq_rel(&code_cache->accessor_count); return NULL; } #endif __Pyx_CachedCodeObjectType *result = __pyx__find_code_object(code_cache, code_line); #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING __pyx_atomic_decr_acq_rel(&code_cache->accessor_count); #endif return result; #endif } static void __pyx__insert_code_object(struct __Pyx_CodeObjectCache *code_cache, int code_line, __Pyx_CachedCodeObjectType* code_object) { int pos, i; __Pyx_CodeObjectCacheEntry* entries = code_cache->entries; if (unlikely(!code_line)) { return; } if (unlikely(!entries)) { entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); if (likely(entries)) { code_cache->entries = entries; code_cache->max_count = 64; code_cache->count = 1; entries[0].code_line = code_line; entries[0].code_object = code_object; Py_INCREF(code_object); } return; } pos = __pyx_bisect_code_objects(code_cache->entries, code_cache->count, code_line); if ((pos < code_cache->count) && unlikely(code_cache->entries[pos].code_line == code_line)) { __Pyx_CachedCodeObjectType* tmp = entries[pos].code_object; entries[pos].code_object = code_object; Py_INCREF(code_object); Py_DECREF(tmp); return; } if (code_cache->count == code_cache->max_count) { int new_max = code_cache->max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( code_cache->entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } code_cache->entries = entries; 
code_cache->max_count = new_max; } for (i=code_cache->count; i>pos; i--) { entries[i] = entries[i-1]; } entries[pos].code_line = code_line; entries[pos].code_object = code_object; code_cache->count++; Py_INCREF(code_object); } static void __pyx_insert_code_object(int code_line, __Pyx_CachedCodeObjectType* code_object) { #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING && !CYTHON_ATOMICS (void)__pyx__insert_code_object; return; // Most implementation should have atomics. But otherwise, don't make it thread-safe, just fail. #else struct __Pyx_CodeObjectCache *code_cache = &__pyx_mstate_global->__pyx_code_cache; #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING __pyx_nonatomic_int_type expected = 0; if (!__pyx_atomic_int_cmp_exchange(&code_cache->accessor_count, &expected, INT_MIN)) { return; } #endif __pyx__insert_code_object(code_cache, code_line, code_object); #if CYTHON_COMPILING_IN_CPYTHON_FREETHREADING __pyx_atomic_sub(&code_cache->accessor_count, INT_MIN); #endif #endif } /* AddTraceback */ #include "compile.h" #include "frameobject.h" #include "traceback.h" #if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API && !defined(PYPY_VERSION) #ifndef Py_BUILD_CORE #define Py_BUILD_CORE 1 #endif #include "internal/pycore_frame.h" #endif #if CYTHON_COMPILING_IN_LIMITED_API static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, PyObject *firstlineno, PyObject *name) { PyObject *replace = NULL; if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; replace = PyObject_GetAttrString(code, "replace"); if (likely(replace)) { PyObject *result = PyObject_Call(replace, __pyx_mstate_global->__pyx_empty_tuple, scratch_dict); Py_DECREF(replace); return result; } PyErr_Clear(); return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyObject *code_object = 
NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; PyObject *replace = NULL, *getframe = NULL, *frame = NULL; PyObject *exc_type, *exc_value, *exc_traceback; int success = 0; if (c_line) { (void) __pyx_cfilenm; (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); } PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); code_object = __pyx_find_code_object(c_line ? -c_line : py_line); if (!code_object) { code_object = Py_CompileString("_getframe()", filename, Py_eval_input); if (unlikely(!code_object)) goto bad; py_py_line = PyLong_FromLong(py_line); if (unlikely(!py_py_line)) goto bad; py_funcname = PyUnicode_FromString(funcname); if (unlikely(!py_funcname)) goto bad; dict = PyDict_New(); if (unlikely(!dict)) goto bad; { PyObject *old_code_object = code_object; code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); Py_DECREF(old_code_object); } if (unlikely(!code_object)) goto bad; __pyx_insert_code_object(c_line ? -c_line : py_line, code_object); } else { dict = PyDict_New(); } getframe = PySys_GetObject("_getframe"); if (unlikely(!getframe)) goto bad; if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; frame = PyEval_EvalCode(code_object, dict, dict); if (unlikely(!frame) || frame == Py_None) goto bad; success = 1; bad: PyErr_Restore(exc_type, exc_value, exc_traceback); Py_XDECREF(code_object); Py_XDECREF(py_py_line); Py_XDECREF(py_funcname); Py_XDECREF(dict); Py_XDECREF(replace); if (success) { PyTraceBack_Here( (struct _frame*)frame); } Py_XDECREF(frame); } #else static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = NULL; PyObject *py_funcname = NULL; if (c_line) { py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); if (!py_funcname) goto bad; funcname = PyUnicode_AsUTF8(py_funcname); if (!funcname) goto bad; } py_code = PyCode_NewEmpty(filename, 
funcname, py_line); Py_XDECREF(py_funcname); return py_code; bad: Py_XDECREF(py_funcname); return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject *ptype, *pvalue, *ptraceback; if (c_line) { c_line = __Pyx_CLineForTraceback(tstate, c_line); } py_code = __pyx_find_code_object(c_line ? -c_line : py_line); if (!py_code) { __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); py_code = __Pyx_CreateCodeObjectForTraceback( funcname, c_line, py_line, filename); if (!py_code) { /* If the code object creation fails, then we should clear the fetched exception references and propagate the new exception */ Py_XDECREF(ptype); Py_XDECREF(pvalue); Py_XDECREF(ptraceback); goto bad; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); } py_frame = PyFrame_New( tstate, /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ __pyx_mstate_global->__pyx_d, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); if (!py_frame) goto bad; __Pyx_PyFrame_SetLineNumber(py_frame, py_line); PyTraceBack_Here(py_frame); bad: Py_XDECREF(py_code); Py_XDECREF(py_frame); } #endif /* CIntFromPyVerify */ #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) #define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) #define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ {\ func_type value = func_value;\ if (sizeof(target_type) < sizeof(func_type)) {\ if (unlikely(value != (func_type) (target_type) value)) {\ func_type zero = 0;\ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ return (target_type) -1;\ if (is_unsigned && unlikely(value < zero))\ goto 
raise_neg_overflow;\ else\ goto raise_overflow;\ }\ }\ return (target_type) value;\ } /* CIntFromPy */ static CYTHON_INLINE int __Pyx_PyLong_As_int(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const int neg_one = (int) -1, const_zero = (int) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (unlikely(!PyLong_Check(x))) { int val; PyObject *tmp = __Pyx_PyNumber_Long(x); if (!tmp) return (int) -1; val = __Pyx_PyLong_As_int(tmp); Py_DECREF(tmp); return val; } if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 3: if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 4: if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | 
(unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(int) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 2: if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -3: if ((8 * sizeof(int) - 1 
> 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 3: if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -4: if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 4: if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; } } #endif if ((sizeof(int) <= 
sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { int val; int ret = -1; #if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API Py_ssize_t bytes_copied = PyLong_AsNativeBytes( x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); if (unlikely(bytes_copied == -1)) { } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { goto raise_overflow; } else { ret = 0; } #elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)x, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *v; PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; int chunk_size = (sizeof(long) < 8) ? 
30 : 62; if (likely(PyLong_CheckExact(x))) { v = __Pyx_NewRef(x); } else { v = PyNumber_Long(x); if (unlikely(!v)) return (int) -1; assert(PyLong_CheckExact(v)); } { int result = PyObject_RichCompareBool(v, Py_False, Py_LT); if (unlikely(result < 0)) { Py_DECREF(v); return (int) -1; } is_negative = result == 1; } if (is_unsigned && unlikely(is_negative)) { Py_DECREF(v); goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); Py_DECREF(v); if (unlikely(!stepval)) return (int) -1; } else { stepval = v; } v = NULL; val = (int) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; long idigit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; val |= ((int) idigit) << bits; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; } Py_DECREF(shift); shift = NULL; Py_DECREF(mask); mask = NULL; { long idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((int) idigit) << bits; } if (!is_unsigned) { if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif if (unlikely(ret)) return (int) -1; return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int"); return (int) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int"); return (int) -1; } /* CIntFromPy */ static CYTHON_INLINE int32_t __Pyx_PyLong_As_int32_t(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const int32_t neg_one = (int32_t) -1, const_zero = (int32_t) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (unlikely(!PyLong_Check(x))) { int32_t val; PyObject *tmp = __Pyx_PyNumber_Long(x); if (!tmp) return (int32_t) -1; val = __Pyx_PyLong_As_int32_t(tmp); Py_DECREF(tmp); return val; } if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int32_t, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(int32_t) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) >= 2 * PyLong_SHIFT)) { return (int32_t) (((((int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0])); } } break; case 3: if ((8 * sizeof(int32_t) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned 
long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) >= 3 * PyLong_SHIFT)) { return (int32_t) (((((((int32_t)digits[2]) << PyLong_SHIFT) | (int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0])); } } break; case 4: if ((8 * sizeof(int32_t) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) >= 4 * PyLong_SHIFT)) { return (int32_t) (((((((((int32_t)digits[3]) << PyLong_SHIFT) | (int32_t)digits[2]) << PyLong_SHIFT) | (int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int32_t) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(int32_t) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(int32_t, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int32_t) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int32_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int32_t, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(int32_t) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { 
__PYX_VERIFY_RETURN_INT(int32_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) - 1 > 2 * PyLong_SHIFT)) { return (int32_t) (((int32_t)-1)*(((((int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0]))); } } break; case 2: if ((8 * sizeof(int32_t) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) - 1 > 2 * PyLong_SHIFT)) { return (int32_t) ((((((int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0]))); } } break; case -3: if ((8 * sizeof(int32_t) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) - 1 > 3 * PyLong_SHIFT)) { return (int32_t) (((int32_t)-1)*(((((((int32_t)digits[2]) << PyLong_SHIFT) | (int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0]))); } } break; case 3: if ((8 * sizeof(int32_t) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) - 1 > 3 * PyLong_SHIFT)) { return (int32_t) ((((((((int32_t)digits[2]) << PyLong_SHIFT) | (int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0]))); } } break; case -4: if ((8 * sizeof(int32_t) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 
* sizeof(int32_t) - 1 > 4 * PyLong_SHIFT)) { return (int32_t) (((int32_t)-1)*(((((((((int32_t)digits[3]) << PyLong_SHIFT) | (int32_t)digits[2]) << PyLong_SHIFT) | (int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0]))); } } break; case 4: if ((8 * sizeof(int32_t) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int32_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int32_t) - 1 > 4 * PyLong_SHIFT)) { return (int32_t) ((((((((((int32_t)digits[3]) << PyLong_SHIFT) | (int32_t)digits[2]) << PyLong_SHIFT) | (int32_t)digits[1]) << PyLong_SHIFT) | (int32_t)digits[0]))); } } break; } } #endif if ((sizeof(int32_t) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(int32_t, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int32_t) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int32_t, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { int32_t val; int ret = -1; #if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API Py_ssize_t bytes_copied = PyLong_AsNativeBytes( x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); if (unlikely(bytes_copied == -1)) { } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { goto raise_overflow; } else { ret = 0; } #elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)x, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *v; PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; int chunk_size = (sizeof(long) < 8) ? 
30 : 62; if (likely(PyLong_CheckExact(x))) { v = __Pyx_NewRef(x); } else { v = PyNumber_Long(x); if (unlikely(!v)) return (int32_t) -1; assert(PyLong_CheckExact(v)); } { int result = PyObject_RichCompareBool(v, Py_False, Py_LT); if (unlikely(result < 0)) { Py_DECREF(v); return (int32_t) -1; } is_negative = result == 1; } if (is_unsigned && unlikely(is_negative)) { Py_DECREF(v); goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); Py_DECREF(v); if (unlikely(!stepval)) return (int32_t) -1; } else { stepval = v; } v = NULL; val = (int32_t) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(int32_t) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; long idigit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; val |= ((int32_t) idigit) << bits; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; } Py_DECREF(shift); shift = NULL; Py_DECREF(mask); mask = NULL; { long idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(int32_t) * 8) - bits - (is_unsigned ? 
0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((int32_t) idigit) << bits; } if (!is_unsigned) { if (unlikely(val & (((int32_t) 1) << (sizeof(int32_t) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif if (unlikely(ret)) return (int32_t) -1; return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int32_t"); return (int32_t) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int32_t"); return (int32_t) -1; } /* CIntFromPy */ static CYTHON_INLINE int64_t __Pyx_PyLong_As_int64_t(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const int64_t neg_one = (int64_t) -1, const_zero = (int64_t) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (unlikely(!PyLong_Check(x))) { int64_t val; PyObject *tmp = __Pyx_PyNumber_Long(x); if (!tmp) return (int64_t) -1; val = __Pyx_PyLong_As_int64_t(tmp); Py_DECREF(tmp); return val; } if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int64_t, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(int64_t) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) >= 2 * PyLong_SHIFT)) { return (int64_t) (((((int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0])); } } break; case 3: if ((8 * sizeof(int64_t) > 2 * 
PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) >= 3 * PyLong_SHIFT)) { return (int64_t) (((((((int64_t)digits[2]) << PyLong_SHIFT) | (int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0])); } } break; case 4: if ((8 * sizeof(int64_t) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) >= 4 * PyLong_SHIFT)) { return (int64_t) (((((((((int64_t)digits[3]) << PyLong_SHIFT) | (int64_t)digits[2]) << PyLong_SHIFT) | (int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int64_t) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(int64_t) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(int64_t, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int64_t) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int64_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int64_t, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(int64_t) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned 
long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) - 1 > 2 * PyLong_SHIFT)) { return (int64_t) (((int64_t)-1)*(((((int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0]))); } } break; case 2: if ((8 * sizeof(int64_t) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) - 1 > 2 * PyLong_SHIFT)) { return (int64_t) ((((((int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0]))); } } break; case -3: if ((8 * sizeof(int64_t) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) - 1 > 3 * PyLong_SHIFT)) { return (int64_t) (((int64_t)-1)*(((((((int64_t)digits[2]) << PyLong_SHIFT) | (int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0]))); } } break; case 3: if ((8 * sizeof(int64_t) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) - 1 > 3 * PyLong_SHIFT)) { return (int64_t) ((((((((int64_t)digits[2]) << PyLong_SHIFT) | (int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0]))); } } break; case -4: if ((8 * sizeof(int64_t) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) } else if ((8 * sizeof(int64_t) - 1 > 4 * PyLong_SHIFT)) { return (int64_t) (((int64_t)-1)*(((((((((int64_t)digits[3]) << PyLong_SHIFT) | (int64_t)digits[2]) << PyLong_SHIFT) | (int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0]))); } } break; case 4: if ((8 * sizeof(int64_t) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int64_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int64_t) - 1 > 4 * PyLong_SHIFT)) { return (int64_t) ((((((((((int64_t)digits[3]) << PyLong_SHIFT) | (int64_t)digits[2]) << PyLong_SHIFT) | (int64_t)digits[1]) << PyLong_SHIFT) | (int64_t)digits[0]))); } } break; } } #endif if ((sizeof(int64_t) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(int64_t, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int64_t) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int64_t, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { int64_t val; int ret = -1; #if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API Py_ssize_t bytes_copied = PyLong_AsNativeBytes( x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? 
Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); if (unlikely(bytes_copied == -1)) { } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { goto raise_overflow; } else { ret = 0; } #elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)x, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *v; PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; int chunk_size = (sizeof(long) < 8) ? 30 : 62; if (likely(PyLong_CheckExact(x))) { v = __Pyx_NewRef(x); } else { v = PyNumber_Long(x); if (unlikely(!v)) return (int64_t) -1; assert(PyLong_CheckExact(v)); } { int result = PyObject_RichCompareBool(v, Py_False, Py_LT); if (unlikely(result < 0)) { Py_DECREF(v); return (int64_t) -1; } is_negative = result == 1; } if (is_unsigned && unlikely(is_negative)) { Py_DECREF(v); goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); Py_DECREF(v); if (unlikely(!stepval)) return (int64_t) -1; } else { stepval = v; } v = NULL; val = (int64_t) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(int64_t) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; long idigit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; val |= ((int64_t) idigit) << bits; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; } Py_DECREF(shift); shift = NULL; Py_DECREF(mask); mask = NULL; { long idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = 
((int) sizeof(int64_t) * 8) - bits - (is_unsigned ? 0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((int64_t) idigit) << bits; } if (!is_unsigned) { if (unlikely(val & (((int64_t) 1) << (sizeof(int64_t) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif if (unlikely(ret)) return (int64_t) -1; return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int64_t"); return (int64_t) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int64_t"); return (int64_t) -1; } /* CIntFromPy */ static CYTHON_INLINE char __Pyx_PyLong_As_char(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const char neg_one = (char) -1, const_zero = (char) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (unlikely(!PyLong_Check(x))) { char val; PyObject *tmp = __Pyx_PyNumber_Long(x); if (!tmp) return (char) -1; val = __Pyx_PyLong_As_char(tmp); Py_DECREF(tmp); return val; } if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(char, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(char) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) >= 2 * PyLong_SHIFT)) { return (char) (((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0])); } } break; case 3: if ((8 * sizeof(char) > 2 * 
PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) >= 3 * PyLong_SHIFT)) { return (char) (((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])); } } break; case 4: if ((8 * sizeof(char) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) >= 4 * PyLong_SHIFT)) { return (char) (((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (char) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(char) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(char, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(char) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(char, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(char, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(char) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, 
long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) - 1 > 2 * PyLong_SHIFT)) { return (char) (((char)-1)*(((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); } } break; case 2: if ((8 * sizeof(char) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) - 1 > 2 * PyLong_SHIFT)) { return (char) ((((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); } } break; case -3: if ((8 * sizeof(char) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) - 1 > 3 * PyLong_SHIFT)) { return (char) (((char)-1)*(((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); } } break; case 3: if ((8 * sizeof(char) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) - 1 > 3 * PyLong_SHIFT)) { return (char) ((((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); } } break; case -4: if ((8 * sizeof(char) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) - 1 > 4 * PyLong_SHIFT)) { return (char) (((char)-1)*(((((((((char)digits[3]) << PyLong_SHIFT) | 
(char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); } } break; case 4: if ((8 * sizeof(char) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(char) - 1 > 4 * PyLong_SHIFT)) { return (char) ((((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); } } break; } } #endif if ((sizeof(char) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(char, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(char) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(char, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { char val; int ret = -1; #if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API Py_ssize_t bytes_copied = PyLong_AsNativeBytes( x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); if (unlikely(bytes_copied == -1)) { } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { goto raise_overflow; } else { ret = 0; } #elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)x, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *v; PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; int chunk_size = (sizeof(long) < 8) ? 
30 : 62; if (likely(PyLong_CheckExact(x))) { v = __Pyx_NewRef(x); } else { v = PyNumber_Long(x); if (unlikely(!v)) return (char) -1; assert(PyLong_CheckExact(v)); } { int result = PyObject_RichCompareBool(v, Py_False, Py_LT); if (unlikely(result < 0)) { Py_DECREF(v); return (char) -1; } is_negative = result == 1; } if (is_unsigned && unlikely(is_negative)) { Py_DECREF(v); goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); Py_DECREF(v); if (unlikely(!stepval)) return (char) -1; } else { stepval = v; } v = NULL; val = (char) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(char) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; long idigit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; val |= ((char) idigit) << bits; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; } Py_DECREF(shift); shift = NULL; Py_DECREF(mask); mask = NULL; { long idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(char) * 8) - bits - (is_unsigned ? 
/* NOTE(review): machine-generated by Cython 3.1.5 -- do not hand-edit; regenerate
 * from cuda/bindings/_internal/utils.pyx instead.  The newlines of the generated
 * file were collapsed in this extract (preprocessor directives sit mid-line), so
 * the code below deliberately mirrors the input token-for-token.
 * This chunk opens mid-way through __Pyx_PyLong_As_char: the text up to the
 * first "CIntFromPy" marker is that function's final-digit overflow check,
 * sign fix-up and error labels. */
0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((char) idigit) << bits; } if (!is_unsigned) { if (unlikely(val & (((char) 1) << (sizeof(char) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif if (unlikely(ret)) return (char) -1; return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to char"); return (char) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to char"); return (char) -1; } /* CIntFromPy */
/* __Pyx_PyLong_As_size_t: convert a Python int (or int-like object) to size_t.
 * Non-int inputs are coerced via __Pyx_PyNumber_Long first.  Fast paths read
 * the PyLong digit array directly when CYTHON_USE_PYLONG_INTERNALS is set;
 * otherwise it falls back to PyLong_AsUnsignedLong[Long], then to
 * PyLong_AsNativeBytes (CPython >= 3.13), _PyLong_AsByteArray, or a portable
 * chunked mask/shift loop using only the abstract number protocol.  Returns
 * (size_t)-1 with an exception set on error; negative input raises
 * OverflowError (when size_t is unsigned, which the neg_one > const_zero
 * probe establishes at compile time). */
static CYTHON_INLINE size_t __Pyx_PyLong_As_size_t(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const size_t neg_one = (size_t) -1, const_zero = (size_t) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (unlikely(!PyLong_Check(x))) { size_t val; PyObject *tmp = __Pyx_PyNumber_Long(x); if (!tmp) return (size_t) -1; val = __Pyx_PyLong_As_size_t(tmp); Py_DECREF(tmp); return val; } if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(size_t, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(size_t) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) >= 2 * PyLong_SHIFT)) { return (size_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } } break; case 3: if ((8 * sizeof(size_t) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 
3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) >= 3 * PyLong_SHIFT)) { return (size_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } } break; case 4: if ((8 * sizeof(size_t) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) >= 4 * PyLong_SHIFT)) { return (size_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (size_t) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(size_t) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(size_t) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(size_t, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(size_t) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, long, 
-(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT)) { return (size_t) (((size_t)-1)*(((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case 2: if ((8 * sizeof(size_t) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT)) { return (size_t) ((((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case -3: if ((8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT)) { return (size_t) (((size_t)-1)*(((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case 3: if ((8 * sizeof(size_t) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT)) { return (size_t) ((((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case -4: if ((8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT)) { return (size_t) 
(((size_t)-1)*(((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case 4: if ((8 * sizeof(size_t) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT)) { return (size_t) ((((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; } } #endif if ((sizeof(size_t) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(size_t, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(size_t) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(size_t, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { size_t val; int ret = -1; #if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API Py_ssize_t bytes_copied = PyLong_AsNativeBytes( x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); if (unlikely(bytes_copied == -1)) { } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { goto raise_overflow; } else { ret = 0; } #elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)x, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *v; PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; int chunk_size = (sizeof(long) < 8) ? 
30 : 62; if (likely(PyLong_CheckExact(x))) { v = __Pyx_NewRef(x); } else { v = PyNumber_Long(x); if (unlikely(!v)) return (size_t) -1; assert(PyLong_CheckExact(v)); } { int result = PyObject_RichCompareBool(v, Py_False, Py_LT); if (unlikely(result < 0)) { Py_DECREF(v); return (size_t) -1; } is_negative = result == 1; } if (is_unsigned && unlikely(is_negative)) { Py_DECREF(v); goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); Py_DECREF(v); if (unlikely(!stepval)) return (size_t) -1; } else { stepval = v; } v = NULL; val = (size_t) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(size_t) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; long idigit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; val |= ((size_t) idigit) << bits; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; } Py_DECREF(shift); shift = NULL; Py_DECREF(mask); mask = NULL; { long idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(size_t) * 8) - bits - (is_unsigned ? 
0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((size_t) idigit) << bits; } if (!is_unsigned) { if (unlikely(val & (((size_t) 1) << (sizeof(size_t) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif if (unlikely(ret)) return (size_t) -1; return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to size_t"); return (size_t) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to size_t"); return (size_t) -1; } /* FormatTypeName */
/* Limited-API (< 3.13) replacement for PyType_GetFullyQualifiedName: builds
 * "module.qualname" for a type, dropping the "builtins." prefix.  On any
 * attribute/type failure it clears the error and degrades to the bare
 * qualname (or a cached fallback string), so it never raises. */
#if CYTHON_COMPILING_IN_LIMITED_API && __PYX_LIMITED_VERSION_HEX < 0x030d0000 static __Pyx_TypeName __Pyx_PyType_GetFullyQualifiedName(PyTypeObject* tp) { PyObject *module = NULL, *name = NULL, *result = NULL; #if __PYX_LIMITED_VERSION_HEX < 0x030b0000 name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, __pyx_mstate_global->__pyx_n_u_qualname); #else name = PyType_GetQualName(tp); #endif if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) goto bad; module = __Pyx_PyObject_GetAttrStr((PyObject *)tp, __pyx_mstate_global->__pyx_n_u_module); if (unlikely(module == NULL) || unlikely(!PyUnicode_Check(module))) goto bad; if (PyUnicode_CompareWithASCIIString(module, "builtins") == 0) { result = name; name = NULL; goto done; } result = PyUnicode_FromFormat("%U.%U", module, name); if (unlikely(result == NULL)) goto bad; done: Py_XDECREF(name); Py_XDECREF(module); return result; bad: PyErr_Clear(); if (name) { result = name; name = NULL; } else { result = __Pyx_NewRef(__pyx_mstate_global->__pyx_kp_u__2); } goto done; } #endif /* PyObjectVectorCallKwBuilder */
/* Stores one keyword argument into a vectorcall kwnames tuple ('builder') and
 * the parallel args array slot n.  Steals no references: key is INCREF'd for
 * the tuple, value is stored borrowed into args[n]. */
#if CYTHON_VECTORCALL static int __Pyx_VectorcallBuilder_AddArg(PyObject *key, PyObject *value, PyObject *builder, PyObject **args, int n) { (void)__Pyx_PyObject_FastCallDict; if (__Pyx_PyTuple_SET_ITEM(builder, n, key) != (0)) return -1; Py_INCREF(key); args[n] = value; return 0; } 
/* NOTE(review): Cython 3.1.5 generated utility code, reproduced verbatim
 * (regenerate from utils.pyx rather than editing).  Line breaks were
 * collapsed by the extraction; a few have been restored below only where a
 * collapsed "//" comment would otherwise swallow the following code. */
/* Keyword-argument builder helpers: the _Check variant validates that the key
 * is a str before delegating; _AddArgStr interns a C string key first.  The
 * non-vectorcall fallback stores into a plain dict instead. */
CYTHON_UNUSED static int __Pyx_VectorcallBuilder_AddArg_Check(PyObject *key, PyObject *value, PyObject *builder, PyObject **args, int n) { (void)__Pyx_VectorcallBuilder_AddArgStr; if (unlikely(!PyUnicode_Check(key))) { PyErr_SetString(PyExc_TypeError, "keywords must be strings"); return -1; } return __Pyx_VectorcallBuilder_AddArg(key, value, builder, args, n); } static int __Pyx_VectorcallBuilder_AddArgStr(const char *key, PyObject *value, PyObject *builder, PyObject **args, int n) { PyObject *pyKey = PyUnicode_FromString(key); if (!pyKey) return -1; return __Pyx_VectorcallBuilder_AddArg(pyKey, value, builder, args, n); } #else // CYTHON_VECTORCALL
CYTHON_UNUSED static int __Pyx_VectorcallBuilder_AddArg_Check(PyObject *key, PyObject *value, PyObject *builder, CYTHON_UNUSED PyObject **args, CYTHON_UNUSED int n) { if (unlikely(!PyUnicode_Check(key))) { PyErr_SetString(PyExc_TypeError, "keywords must be strings"); return -1; } return PyDict_SetItem(builder, key, value); } #endif /* CIntToPy */
/* __Pyx_PyLong_From_long: box a C long into a Python int.  Normally resolved
 * at compile time to PyLong_FromLong; the byte-array tail is the generic
 * fallback for exotic type sizes, using native-bytes APIs (3.13+),
 * _PyLong_FromByteArray, or int.from_bytes via vectorcall in the
 * Limited API. */
static CYTHON_INLINE PyObject* __Pyx_PyLong_From_long(long value) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const long neg_one = (long) -1, const_zero = (long) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(long) < sizeof(long)) { return PyLong_FromLong((long) value); } else if (sizeof(long) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #if defined(HAVE_LONG_LONG) && !CYTHON_COMPILING_IN_PYPY } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(long) <= sizeof(long)) { return PyLong_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { unsigned char *bytes = (unsigned 
char *)&value; #if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 if (is_unsigned) { return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); } else { return PyLong_FromNativeBytes(bytes, sizeof(value), -1); } #elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 int one = 1; int little = (int)*(unsigned char *)&one; return _PyLong_FromByteArray(bytes, sizeof(long), little, !is_unsigned); #else int one = 1; int little = (int)*(unsigned char *)&one; PyObject *from_bytes, *result = NULL, *kwds = NULL; PyObject *py_bytes = NULL, *order_str = NULL; from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); if (!from_bytes) return NULL; py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); if (!py_bytes) goto limited_bad; order_str = PyUnicode_FromString(little ? "little" : "big"); if (!order_str) goto limited_bad; { PyObject *args[3+(CYTHON_VECTORCALL ? 1 : 0)] = { NULL, py_bytes, order_str }; if (!is_unsigned) { kwds = __Pyx_MakeVectorcallBuilderKwds(1); if (!kwds) goto limited_bad; if (__Pyx_VectorcallBuilder_AddArgStr("signed", __Pyx_NewRef(Py_True), kwds, args+3, 0) < 0) goto limited_bad; } result = __Pyx_Object_Vectorcall_CallFromBuilder(from_bytes, args+1, 2 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET, kwds); } limited_bad: Py_XDECREF(kwds); Py_XDECREF(order_str); Py_XDECREF(py_bytes); Py_XDECREF(from_bytes); return result; #endif } } /* CIntFromPy */
/* __Pyx_PyLong_As_long: convert a Python object to C long.  Mirrors
 * __Pyx_PyLong_As_size_t above: digit-level fast paths under
 * CYTHON_USE_PYLONG_INTERNALS, then PyLong_As(Unsigned)Long(Long), then the
 * native-bytes / byte-array / chunked-mask fallbacks.  Returns (long)-1 with
 * an exception set on error. */
static CYTHON_INLINE long __Pyx_PyLong_As_long(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const long neg_one = (long) -1, const_zero = (long) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (unlikely(!PyLong_Check(x))) { long val; PyObject *tmp = __Pyx_PyNumber_Long(x); if (!tmp) return (long) -1; val = __Pyx_PyLong_As_long(tmp); Py_DECREF(tmp); return val; } if (is_unsigned) { #if 
CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 3: if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 4: if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (long) -1; if (unlikely(result == 1)) goto 
raise_neg_overflow; } #endif if ((sizeof(long) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 2: if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -3: if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 3: if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned 
long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -4: if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 4: if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; } } #endif if ((sizeof(long) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { long val; int ret = -1; #if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API Py_ssize_t bytes_copied = PyLong_AsNativeBytes( x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? 
Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); if (unlikely(bytes_copied == -1)) { } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { goto raise_overflow; } else { ret = 0; } #elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)x, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *v; PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; int chunk_size = (sizeof(long) < 8) ? 30 : 62; if (likely(PyLong_CheckExact(x))) { v = __Pyx_NewRef(x); } else { v = PyNumber_Long(x); if (unlikely(!v)) return (long) -1; assert(PyLong_CheckExact(v)); } { int result = PyObject_RichCompareBool(v, Py_False, Py_LT); if (unlikely(result < 0)) { Py_DECREF(v); return (long) -1; } is_negative = result == 1; } if (is_unsigned && unlikely(is_negative)) { Py_DECREF(v); goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); Py_DECREF(v); if (unlikely(!stepval)) return (long) -1; } else { stepval = v; } v = NULL; val = (long) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; long idigit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; val |= ((long) idigit) << bits; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; } Py_DECREF(shift); shift = NULL; Py_DECREF(mask); mask = NULL; { long idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(long) * 
8) - bits - (is_unsigned ? 0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((long) idigit) << bits; } if (!is_unsigned) { if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif if (unlikely(ret)) return (long) -1; return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to long"); return (long) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to long"); return (long) -1; } /* FastTypeChecks */
/* Exception/type subtype checks that avoid PyObject_IsSubclass overhead on
 * CPython: walk tp_base chains (__Pyx_InBases) or scan the cached tp_mro
 * tuple directly (__Pyx_IsSubtype, __Pyx_IsAnySubtype2). */
#if CYTHON_COMPILING_IN_CPYTHON static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { while (a) { a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); if (a == b) return 1; } return b == &PyBaseObject_Type; } static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (a == b) return 1; mro = a->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(a, b); } static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (cls == a || cls == b) return 1; mro = cls->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { PyObject *base = PyTuple_GET_ITEM(mro, i); if (base == (PyObject *)a || base == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); } static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { if (exc_type1) { return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); } else { return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); } } static int 
__Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; assert(PyExceptionClass_Check(exc_type)); n = PyTuple_GET_SIZE(tuple); for (i=0; i= 0x030b0000 /* NOTE(review): the extract is garbled at this point -- every span between '<' and '>' was stripped by the extraction tool, deleting the loop condition/body of __Pyx_PyErr_GivenExceptionMatchesTuple, the remaining FastTypeChecks helpers, and the head of __Pyx_get_runtime_version (the "0x030b0000" fragment presumably belongs to a "PY_VERSION_HEX" preprocessor check -- confirm against regenerated output).  Do not hand-patch: regenerate this file with Cython. */ return Py_Version & ~0xFFUL; #else static unsigned long __Pyx_cached_runtime_version = 0; if (__Pyx_cached_runtime_version == 0) { const char* rt_version = Py_GetVersion(); unsigned long version = 0; unsigned long factor = 0x01000000UL; unsigned int digit = 0; int i = 0; while (factor) { while ('0' <= rt_version[i] && rt_version[i] <= '9') { digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); ++i; } version += factor * digit; if (rt_version[i] != '.') break; digit = 0; factor >>= 8; ++i; } __Pyx_cached_runtime_version = version; } return __Pyx_cached_runtime_version; #endif } /* CheckBinaryVersion */
/* Warns (PyErr_WarnEx) when the Python major.minor this module was compiled
 * against differs from the running interpreter; returns 0/1 on acceptable
 * matches, or the warning's result. */
static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { const unsigned long MAJOR_MINOR = 0xFFFF0000UL; if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) return 0; if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) return 1; { char message[200]; PyOS_snprintf(message, sizeof(message), "compile time Python version %d.%d " "of module '%.100s' " "%s " "runtime version %d.%d", (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), __Pyx_MODULE_NAME, (allow_newer) ? 
"was newer than" : "does not match", (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) ); return PyErr_WarnEx(NULL, message, 1); } } /* FunctionExport */
/* Publishes a C function pointer in the module's __pyx_capi__ dict as a
 * PyCapsule keyed by name, creating the dict on first use.  The fp/void*
 * union sidesteps the function-pointer-to-object-pointer cast. */
static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { PyObject *d = 0; PyObject *cobj = 0; union { void (*fp)(void); void *p; } tmp; d = PyObject_GetAttrString(__pyx_m, "__pyx_capi__"); if (!d) { PyErr_Clear(); d = PyDict_New(); if (!d) goto bad; Py_INCREF(d); if (PyModule_AddObject(__pyx_m, "__pyx_capi__", d) < 0) goto bad; } tmp.fp = f; cobj = PyCapsule_New(tmp.p, sig, 0); if (!cobj) goto bad; if (PyDict_SetItemString(d, name, cobj) < 0) goto bad; Py_DECREF(cobj); Py_DECREF(d); return 0; bad: Py_XDECREF(cobj); Py_XDECREF(d); return -1; } /* InitStrings */
/* Materialises the module's interned string-constant table; hashes each
 * string eagerly so later lookups cannot fail with an error. */
static int __Pyx_InitStrings(__Pyx_StringTabEntry const *t, PyObject **target, const char* const* encoding_names) { while (t->s) { PyObject *str; if (t->is_unicode) { if (t->intern) { str = PyUnicode_InternFromString(t->s); } else if (t->encoding) { str = PyUnicode_Decode(t->s, t->n - 1, encoding_names[t->encoding], NULL); } else { str = PyUnicode_FromStringAndSize(t->s, t->n - 1); } } else { str = PyBytes_FromStringAndSize(t->s, t->n - 1); } if (!str) return -1; *target = str; if (PyObject_Hash(str) == -1) return -1; ++t; ++target; } return 0; } #include /* NOTE(review): header name stripped by the extraction -- presumably <string.h> given the strlen call below; confirm against regenerated output */ static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { size_t len = strlen(s); if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { PyErr_SetString(PyExc_OverflowError, "byte string is too long"); return -1; } return (Py_ssize_t) len; } static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { Py_ssize_t len = __Pyx_ssize_strlen(c_str); if (unlikely(len < 0)) return NULL; return __Pyx_PyUnicode_FromStringAndSize(c_str, len); } static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { Py_ssize_t len = __Pyx_ssize_strlen(c_str); if (unlikely(len < 0)) return NULL; return PyByteArray_FromStringAndSize(c_str, len); } 
static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { Py_ssize_t ignore; return __Pyx_PyObject_AsStringAndSize(o, &ignore); } #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; #if CYTHON_COMPILING_IN_LIMITED_API { const char* result; Py_ssize_t unicode_length; CYTHON_MAYBE_UNUSED_VAR(unicode_length); // only for __PYX_DEFAULT_STRING_ENCODING_IS_ASCII #if __PYX_LIMITED_VERSION_HEX < 0x030A0000 if (unlikely(PyArg_Parse(o, "s#", &result, length) < 0)) return NULL; #else result = PyUnicode_AsUTF8AndSize(o, length); #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII unicode_length = PyUnicode_GetLength(o); if (unlikely(unicode_length < 0)) return NULL; if (unlikely(unicode_length != *length)) { PyUnicode_AsASCIIString(o); return NULL; } #endif return result; } #else #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII if (likely(PyUnicode_IS_ASCII(o))) { *length = PyUnicode_GET_LENGTH(o); return PyUnicode_AsUTF8(o); } else { PyUnicode_AsASCIIString(o); return NULL; } #else return PyUnicode_AsUTF8AndSize(o, length); #endif #endif } #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 if (PyUnicode_Check(o)) { return __Pyx_PyUnicode_AsStringAndSize(o, length); } else #endif if (PyByteArray_Check(o)) { #if (CYTHON_ASSUME_SAFE_SIZE && CYTHON_ASSUME_SAFE_MACROS) || (CYTHON_COMPILING_IN_PYPY && (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))) *length = PyByteArray_GET_SIZE(o); return PyByteArray_AS_STRING(o); #else *length = PyByteArray_Size(o); if (*length == -1) return NULL; return PyByteArray_AsString(o); #endif } else { char* result; int r = PyBytes_AsStringAndSize(o, &result, length); if (unlikely(r < 0)) { return NULL; } 
else { return result; } } } static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { int retval; if (unlikely(!x)) return -1; retval = __Pyx_PyObject_IsTrue(x); Py_DECREF(x); return retval; } static PyObject* __Pyx_PyNumber_LongWrongResultType(PyObject* result) { __Pyx_TypeName result_type_name = __Pyx_PyType_GetFullyQualifiedName(Py_TYPE(result)); if (PyLong_Check(result)) { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " "The ability to return an instance of a strict subclass of int is deprecated, " "and may be removed in a future version of Python.", result_type_name)) { __Pyx_DECREF_TypeName(result_type_name); Py_DECREF(result); return NULL; } __Pyx_DECREF_TypeName(result_type_name); return result; } PyErr_Format(PyExc_TypeError, "__int__ returned non-int (type " __Pyx_FMT_TYPENAME ")", result_type_name); __Pyx_DECREF_TypeName(result_type_name); Py_DECREF(result); return NULL; } static CYTHON_INLINE PyObject* __Pyx_PyNumber_Long(PyObject* x) { #if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; #endif PyObject *res = NULL; if (likely(PyLong_Check(x))) return __Pyx_NewRef(x); #if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; if (likely(m && m->nb_int)) { res = m->nb_int(x); } #else if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { res = PyNumber_Long(x); } #endif if (likely(res)) { if (unlikely(!PyLong_CheckExact(res))) { return __Pyx_PyNumber_LongWrongResultType(res); } } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, "an integer is required"); } return res; } static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; PyObject *x; if (likely(PyLong_CheckExact(b))) { #if CYTHON_USE_PYLONG_INTERNALS if (likely(__Pyx_PyLong_IsCompact(b))) { return 
__Pyx_PyLong_CompactValue(b); } else { const digit* digits = __Pyx_PyLong_Digits(b); const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); switch (size) { case 2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; } } #endif return PyLong_AsSsize_t(b); } x = PyNumber_Index(b); if (!x) return -1; ival = PyLong_AsSsize_t(x); Py_DECREF(x); return ival; } static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); } else { Py_ssize_t ival; PyObject *x; x = PyNumber_Index(o); if (!x) return -1; ival = PyLong_AsLong(x); Py_DECREF(x); return ival; } } static CYTHON_INLINE PyObject *__Pyx_Owned_Py_None(int b) { CYTHON_UNUSED_VAR(b); return __Pyx_NewRef(Py_None); } static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } static CYTHON_INLINE PyObject * __Pyx_PyLong_FromSize_t(size_t ival) { return PyLong_FromSize_t(ival); } #if CYTHON_USE_PYLONG_INTERNALS static CYTHON_INLINE int __Pyx_PyLong_CompactAsLong(PyObject *x, long *return_value) { if (unlikely(!__Pyx_PyLong_IsCompact(x))) return 0; Py_ssize_t value = __Pyx_PyLong_CompactValue(x); if ((sizeof(long) < sizeof(Py_ssize_t)) && unlikely(value != (long) value)) return 0; *return_value = (long) value; return 1; } #endif /* MultiPhaseInitModuleState */ #if CYTHON_PEP489_MULTI_PHASE_INIT && CYTHON_USE_MODULE_STATE #ifndef CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE #if (CYTHON_COMPILING_IN_LIMITED_API || PY_VERSION_HEX >= 0x030C0000) #define CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE 1 #else #define CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE 0 #endif #endif #if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE && !CYTHON_ATOMICS #error "Module state with PEP489 requires atomics. Currently that's one of\ C11, C++11, gcc atomic intrinsics or MSVC atomic intrinsics" #endif #if !CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE #define __Pyx_ModuleStateLookup_Lock() #define __Pyx_ModuleStateLookup_Unlock() #elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d0000 static PyMutex __Pyx_ModuleStateLookup_mutex = {0}; #define __Pyx_ModuleStateLookup_Lock() PyMutex_Lock(&__Pyx_ModuleStateLookup_mutex) #define __Pyx_ModuleStateLookup_Unlock() PyMutex_Unlock(&__Pyx_ModuleStateLookup_mutex) #elif defined(__cplusplus) && __cplusplus >= 201103L #include static std::mutex __Pyx_ModuleStateLookup_mutex; #define __Pyx_ModuleStateLookup_Lock() __Pyx_ModuleStateLookup_mutex.lock() #define __Pyx_ModuleStateLookup_Unlock() __Pyx_ModuleStateLookup_mutex.unlock() #elif defined(__STDC_VERSION__) && (__STDC_VERSION__ > 201112L) && !defined(__STDC_NO_THREADS__) #include static mtx_t __Pyx_ModuleStateLookup_mutex; static once_flag __Pyx_ModuleStateLookup_mutex_once_flag = ONCE_FLAG_INIT; static void 
__Pyx_ModuleStateLookup_initialize_mutex(void) { mtx_init(&__Pyx_ModuleStateLookup_mutex, mtx_plain); } #define __Pyx_ModuleStateLookup_Lock()\ call_once(&__Pyx_ModuleStateLookup_mutex_once_flag, __Pyx_ModuleStateLookup_initialize_mutex);\ mtx_lock(&__Pyx_ModuleStateLookup_mutex) #define __Pyx_ModuleStateLookup_Unlock() mtx_unlock(&__Pyx_ModuleStateLookup_mutex) #elif defined(HAVE_PTHREAD_H) #include static pthread_mutex_t __Pyx_ModuleStateLookup_mutex = PTHREAD_MUTEX_INITIALIZER; #define __Pyx_ModuleStateLookup_Lock() pthread_mutex_lock(&__Pyx_ModuleStateLookup_mutex) #define __Pyx_ModuleStateLookup_Unlock() pthread_mutex_unlock(&__Pyx_ModuleStateLookup_mutex) #elif defined(_WIN32) #include // synchapi.h on its own doesn't work static SRWLOCK __Pyx_ModuleStateLookup_mutex = SRWLOCK_INIT; #define __Pyx_ModuleStateLookup_Lock() AcquireSRWLockExclusive(&__Pyx_ModuleStateLookup_mutex) #define __Pyx_ModuleStateLookup_Unlock() ReleaseSRWLockExclusive(&__Pyx_ModuleStateLookup_mutex) #else #error "No suitable lock available for CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE.\ Requires C standard >= C11, or C++ standard >= C++11,\ or pthreads, or the Windows 32 API, or Python >= 3.13." 
#endif
/* One (interpreter id -> module object) entry of the lookup table. */
typedef struct {
    int64_t id;
    PyObject *module;
} __Pyx_InterpreterIdAndModule;
/* Growable lookup table mapping interpreter ids to this module's instance.
 * While 'interpreter_id_as_index' is non-zero, the interpreter id is used
 * directly as an index into 'table'; once ids grow too large the table is
 * converted to a dense array sorted by id and searched instead. */
typedef struct {
    char interpreter_id_as_index;
    Py_ssize_t count;      /* number of entries in use */
    Py_ssize_t allocated;  /* capacity of 'table' */
    __Pyx_InterpreterIdAndModule table[1];  /* variable-length trailing array */
} __Pyx_ModuleStateLookupData;
/* Below this size a linear scan is used instead of bisection. */
#define __PYX_MODULE_STATE_LOOKUP_SMALL_SIZE 32
#if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE
/* Count of in-flight lock-free readers; writers wait for it to drain. */
static __pyx_atomic_int_type __Pyx_ModuleStateLookup_read_counter = 0;
#endif
#if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE
/* The table pointer itself is accessed atomically in thread-safe builds. */
static __pyx_atomic_ptr_type __Pyx_ModuleStateLookup_data = 0;
#else
static __Pyx_ModuleStateLookupData* __Pyx_ModuleStateLookup_data = NULL;
#endif
/* Return the first entry in the id-sorted 'table' whose id is >= interpreterId
 * (i.e. a lower bound); returns the one-past-the-end pointer when every id is
 * smaller.  Bisects while the range is large, then finishes linearly. */
static __Pyx_InterpreterIdAndModule* __Pyx_State_FindModuleStateLookupTableLowerBound(
        __Pyx_InterpreterIdAndModule* table, Py_ssize_t count, int64_t interpreterId) {
    __Pyx_InterpreterIdAndModule* begin = table;
    __Pyx_InterpreterIdAndModule* end = begin + count;
    /* Common case: interpreter 0 (or the lowest id) is asked for first. */
    if (begin->id == interpreterId) {
        return begin;
    }
    while ((end - begin) > __PYX_MODULE_STATE_LOOKUP_SMALL_SIZE) {
        __Pyx_InterpreterIdAndModule* halfway = begin + (end - begin)/2;
        if (halfway->id == interpreterId) {
            return halfway;
        }
        if (halfway->id < interpreterId) {
            begin = halfway;
        } else {
            end = halfway;
        }
    }
    /* Linear tail scan over the remaining small range. */
    for (; begin < end; ++begin) {
        if (begin->id >= interpreterId) return begin;
    }
    return begin;
}
/* Look up the module object registered for the current interpreter.
 * Returns a borrowed reference, or NULL if none is registered (or the
 * interpreter id cannot be determined). */
static PyObject *__Pyx_State_FindModule(CYTHON_UNUSED void* dummy) {
    int64_t interpreter_id = PyInterpreterState_GetID(__Pyx_PyInterpreterState_Get());
    if (interpreter_id == -1) return NULL;
#if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE
    /* Optimistic lock-free read: announce ourselves via the read counter,
     * then re-load the table pointer with acquire ordering; if it has not
     * changed since the first relaxed load, the table is safe to read. */
    __Pyx_ModuleStateLookupData* data = (__Pyx_ModuleStateLookupData*)__pyx_atomic_pointer_load_relaxed(&__Pyx_ModuleStateLookup_data);
    {
        __pyx_atomic_incr_acq_rel(&__Pyx_ModuleStateLookup_read_counter);
        if (likely(data)) {
            __Pyx_ModuleStateLookupData* new_data = (__Pyx_ModuleStateLookupData*)__pyx_atomic_pointer_load_acquire(&__Pyx_ModuleStateLookup_data);
            if (likely(data == new_data)) {
                goto read_finished;
            }
        }
        /* A writer intervened (or there is no table yet): retract our read
         * claim and fall back to the locked slow path on the next line. */
        __pyx_atomic_decr_acq_rel(&__Pyx_ModuleStateLookup_read_counter);
__Pyx_ModuleStateLookup_Lock();
        /* Under the writer lock the table pointer is stable, so a relaxed
         * reload is sufficient; re-register as a reader before unlocking. */
        __pyx_atomic_incr_relaxed(&__Pyx_ModuleStateLookup_read_counter);
        data = (__Pyx_ModuleStateLookupData*)__pyx_atomic_pointer_load_relaxed(&__Pyx_ModuleStateLookup_data);
        __Pyx_ModuleStateLookup_Unlock();
    }
read_finished:;
#else
    __Pyx_ModuleStateLookupData* data = __Pyx_ModuleStateLookup_data;
#endif
    __Pyx_InterpreterIdAndModule* found = NULL;
    if (unlikely(!data)) goto end;
    if (data->interpreter_id_as_index) {
        /* Direct-index layout: id doubles as the table index. */
        if (interpreter_id < data->count) {
            found = data->table+interpreter_id;
        }
    } else {
        /* Sorted layout: lower-bound search. */
        found = __Pyx_State_FindModuleStateLookupTableLowerBound(
            data->table, data->count, interpreter_id);
    }
end: {
        PyObject *result=NULL;
        /* The lower bound may point at a larger id (or an empty slot);
         * only an exact id match counts. */
        if (found && found->id == interpreter_id) {
            result = found->module;
        }
#if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE
        /* Done reading: let any waiting writer proceed. */
        __pyx_atomic_decr_acq_rel(&__Pyx_ModuleStateLookup_read_counter);
#endif
        return result;
    }
}
#if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE
/* Writer-side spin: wait until no lock-free reader is inside the table. */
static void __Pyx_ModuleStateLookup_wait_until_no_readers(void) {
    while (__pyx_atomic_load(&__Pyx_ModuleStateLookup_read_counter) != 0);
}
#else
#define __Pyx_ModuleStateLookup_wait_until_no_readers()
#endif
/* Insert 'module' into a table in direct-index layout, growing it (by
 * doubling) until 'interpreter_id' is a valid index.  On success *old_data
 * is updated to the (possibly reallocated) table and 0 is returned; on
 * allocation failure -1 is returned with MemoryError set and *old_data
 * left pointing at the still-valid original table. */
static int __Pyx_State_AddModuleInterpIdAsIndex(__Pyx_ModuleStateLookupData **old_data,
                                                PyObject* module, int64_t interpreter_id) {
    Py_ssize_t to_allocate = (*old_data)->allocated;
    while (to_allocate <= interpreter_id) {
        if (to_allocate == 0) to_allocate = 1;
        else to_allocate *= 2;
    }
    __Pyx_ModuleStateLookupData *new_data = *old_data;
    if (to_allocate != (*old_data)->allocated) {
        /* table[1] is a length-1 trailing array, hence the (to_allocate-1). */
        new_data = (__Pyx_ModuleStateLookupData *)realloc(
            *old_data,
            sizeof(__Pyx_ModuleStateLookupData)+(to_allocate-1)*sizeof(__Pyx_InterpreterIdAndModule));
        if (!new_data) {
            PyErr_NoMemory();
            return -1;
        }
        /* Initialize the newly grown slots: id mirrors the index, no module. */
        for (Py_ssize_t i = new_data->allocated; i < to_allocate; ++i) {
            new_data->table[i].id = i;
            new_data->table[i].module = NULL;
        }
        new_data->allocated = to_allocate;
    }
    new_data->table[interpreter_id].module = module;
    if (new_data->count < interpreter_id+1) {
        new_data->count = interpreter_id+1;
    }
*old_data = new_data; return 0; } static void __Pyx_State_ConvertFromInterpIdAsIndex(__Pyx_ModuleStateLookupData *data) { __Pyx_InterpreterIdAndModule *read = data->table; __Pyx_InterpreterIdAndModule *write = data->table; __Pyx_InterpreterIdAndModule *end = read + data->count; for (; readmodule) { write->id = read->id; write->module = read->module; ++write; } } data->count = write - data->table; for (; writeid = 0; write->module = NULL; } data->interpreter_id_as_index = 0; } static int __Pyx_State_AddModule(PyObject* module, CYTHON_UNUSED void* dummy) { int64_t interpreter_id = PyInterpreterState_GetID(__Pyx_PyInterpreterState_Get()); if (interpreter_id == -1) return -1; int result = 0; __Pyx_ModuleStateLookup_Lock(); #if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE __Pyx_ModuleStateLookupData *old_data = (__Pyx_ModuleStateLookupData *) __pyx_atomic_pointer_exchange(&__Pyx_ModuleStateLookup_data, 0); #else __Pyx_ModuleStateLookupData *old_data = __Pyx_ModuleStateLookup_data; #endif __Pyx_ModuleStateLookupData *new_data = old_data; if (!new_data) { new_data = (__Pyx_ModuleStateLookupData *)calloc(1, sizeof(__Pyx_ModuleStateLookupData)); if (!new_data) { result = -1; PyErr_NoMemory(); goto end; } new_data->allocated = 1; new_data->interpreter_id_as_index = 1; } __Pyx_ModuleStateLookup_wait_until_no_readers(); if (new_data->interpreter_id_as_index) { if (interpreter_id < __PYX_MODULE_STATE_LOOKUP_SMALL_SIZE) { result = __Pyx_State_AddModuleInterpIdAsIndex(&new_data, module, interpreter_id); goto end; } __Pyx_State_ConvertFromInterpIdAsIndex(new_data); } { Py_ssize_t insert_at = 0; { __Pyx_InterpreterIdAndModule* lower_bound = __Pyx_State_FindModuleStateLookupTableLowerBound( new_data->table, new_data->count, interpreter_id); assert(lower_bound); insert_at = lower_bound - new_data->table; if (unlikely(insert_at < new_data->count && lower_bound->id == interpreter_id)) { lower_bound->module = module; goto end; // already in table, nothing more to do } } if (new_data->count+1 
>= new_data->allocated) { Py_ssize_t to_allocate = (new_data->count+1)*2; new_data = (__Pyx_ModuleStateLookupData*)realloc( new_data, sizeof(__Pyx_ModuleStateLookupData) + (to_allocate-1)*sizeof(__Pyx_InterpreterIdAndModule)); if (!new_data) { result = -1; new_data = old_data; PyErr_NoMemory(); goto end; } new_data->allocated = to_allocate; } ++new_data->count; int64_t last_id = interpreter_id; PyObject *last_module = module; for (Py_ssize_t i=insert_at; icount; ++i) { int64_t current_id = new_data->table[i].id; new_data->table[i].id = last_id; last_id = current_id; PyObject *current_module = new_data->table[i].module; new_data->table[i].module = last_module; last_module = current_module; } } end: #if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE __pyx_atomic_pointer_exchange(&__Pyx_ModuleStateLookup_data, new_data); #else __Pyx_ModuleStateLookup_data = new_data; #endif __Pyx_ModuleStateLookup_Unlock(); return result; } static int __Pyx_State_RemoveModule(CYTHON_UNUSED void* dummy) { int64_t interpreter_id = PyInterpreterState_GetID(__Pyx_PyInterpreterState_Get()); if (interpreter_id == -1) return -1; __Pyx_ModuleStateLookup_Lock(); #if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE __Pyx_ModuleStateLookupData *data = (__Pyx_ModuleStateLookupData *) __pyx_atomic_pointer_exchange(&__Pyx_ModuleStateLookup_data, 0); #else __Pyx_ModuleStateLookupData *data = __Pyx_ModuleStateLookup_data; #endif if (data->interpreter_id_as_index) { if (interpreter_id < data->count) { data->table[interpreter_id].module = NULL; } goto done; } { __Pyx_ModuleStateLookup_wait_until_no_readers(); __Pyx_InterpreterIdAndModule* lower_bound = __Pyx_State_FindModuleStateLookupTableLowerBound( data->table, data->count, interpreter_id); if (!lower_bound) goto done; if (lower_bound->id != interpreter_id) goto done; __Pyx_InterpreterIdAndModule *end = data->table+data->count; for (;lower_boundid = (lower_bound+1)->id; lower_bound->module = (lower_bound+1)->module; } } --data->count; if (data->count == 0) { 
free(data);
        /* NULL is republished below: readers then see "no table". */
        data = NULL;
    }
done:
#if CYTHON_MODULE_STATE_LOOKUP_THREAD_SAFE
    /* Republish the (possibly freed-and-NULL) table for lock-free readers. */
    __pyx_atomic_pointer_exchange(&__Pyx_ModuleStateLookup_data, data);
#else
    __Pyx_ModuleStateLookup_data = data;
#endif
    __Pyx_ModuleStateLookup_Unlock();
    return 0;
}
#endif
/* #### Code section: utility_code_pragmas_end ### */
#ifdef _MSC_VER
#pragma warning( pop )
#endif
/* #### Code section: end ### */
#endif /* Py_PYTHON_H */